in go/deepequal/deep_equals.go [51:227]
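// deepValueEqual closely follows the shape of reflect's unexported deepValueEqual,
// with two additions visible below: it calls a type's own Equal method when one is
// defined with a suitable signature, and it compares unexported struct fields by
// re-deriving them through unsafe.Pointer.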
func deepValueEqual(v1, v2 reflect.Value, visited map[visit]bool, depth int) bool {
if !v1.IsValid() || !v2.IsValid() {
return v1.IsValid() == v2.IsValid()
}
if v1.Type() != v2.Type() {
return false
}
// We want to avoid putting more in the visited map than we need to.
// For any possible reference cycle that might be encountered,
// hard(k) needs to return true for the kind of at least one of the types in the cycle.
hard := func(k reflect.Kind) bool {
switch k {
case reflect.Map, reflect.Slice, reflect.Ptr, reflect.Interface:
return true
}
return false
}
if v1.CanAddr() && v2.CanAddr() && hard(v1.Kind()) {
addr1 := unsafe.Pointer(v1.UnsafeAddr())
addr2 := unsafe.Pointer(v2.UnsafeAddr())
if uintptr(addr1) > uintptr(addr2) {
// Canonicalize order to reduce number of entries in visited.
// Assumes non-moving garbage collector.
addr1, addr2 = addr2, addr1
}
// Short circuit if references are already seen.
typ := v1.Type()
v := visit{addr1, addr2, typ}
if visited[v] {
return true
}
// Remember for later.
visited[v] = true
}
switch v1.Kind() {
case reflect.Array:
for i := 0; i < v1.Len(); i++ {
if !deepValueEqual(v1.Index(i), v2.Index(i), visited, depth+1) {
return false
}
}
return true
case reflect.Slice:
if v1.IsNil() != v2.IsNil() {
return false
}
if v1.Len() != v2.Len() {
return false
}
if v1.Pointer() == v2.Pointer() {
return true
}
for i := 0; i < v1.Len(); i++ {
if !deepValueEqual(v1.Index(i), v2.Index(i), visited, depth+1) {
return false
}
}
return true
case reflect.Interface:
if v1.IsNil() || v2.IsNil() {
return v1.IsNil() == v2.IsNil()
}
return deepValueEqual(v1.Elem(), v2.Elem(), visited, depth+1)
case reflect.Ptr:
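// Same pointer (or both nil) means both sides refer to the same value.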
if v1.Pointer() == v2.Pointer() {
return true
}
return deepValueEqual(v1.Elem(), v2.Elem(), visited, depth+1)
case reflect.Struct:
// https://stackoverflow.com/a/43918797 tells us that we can make an addressable
// copy of the struct (if it isn't addressable already). This lets us compare
// unexported fields later using unsafe.Pointer. Note that we shouldn't blindly
// make a new, addressable copy of the element, because that would defeat the
// infinite-recursion protection above.
v1copy := v1
if !v1.CanAddr() {
v1copy = reflect.New(v1.Type()).Elem()
v1copy.Set(v1)
}
v2copy := v2
if !v2.CanAddr() {
v2copy = reflect.New(v2.Type()).Elem()
v2copy.Set(v2)
}
// If Equal is defined on the struct, call it and use the result if valid.
// This is especially helpful in Go 1.9, with the introduction of monotonic
// times. reflect.DeepEqual() would compare the (unexported) monotonic field
// and incorrectly deduce times as being not equal. By calling the Equal
// method when available, we allow struct writers to control the DeepEqual
// behavior.
// Look up methods on the pointer type so that both value-receiver and
// pointer-receiver methods are visible (a pointer type's method set includes
// the value type's methods).
tp := reflect.PtrTo(v1.Type())
if equal, found := tp.MethodByName("Equal"); found {
if equal.Type.NumIn() == 2 && // Two inputs: the receiver plus the value to compare against
equal.Type.NumOut() == 1 && // One output, which is exactly a bool
equal.Type.Out(0).Kind() == reflect.Bool {
// Make sure v2copy can be passed as the second argument, using a pointer if
// necessary. Note that t.AssignableTo(u) reports whether a value of type t can
// be assigned to type u, so the argument's type goes on the left and the
// parameter type on the right.
arg := v2copy
if !arg.Type().AssignableTo(equal.Type.In(1)) {
arg = arg.Addr()
}
if arg.Type().AssignableTo(equal.Type.In(1)) {
// Actually call the method. Since we looked it up on the pointer type, the
// receiver (first argument) must be v1copy.Addr().
retvals := equal.Func.Call([]reflect.Value{v1copy.Addr(), arg})
if len(retvals) == 1 {
if isEqual, ok := retvals[0].Interface().(bool); ok {
return isEqual
}
}
}
// If any of that failed, the Equal method we found isn't something we can use, so
// fall through to the regular field-by-field comparison.
}
}
for i, n := 0, v1.NumField(); i < n; i++ {
// Then, we can use unsafe.Pointer to get access to the field, even if it is
// unexported.
f1 := v1copy.Field(i)
if !f1.CanInterface() {
f1 = reflect.NewAt(f1.Type(), unsafe.Pointer(f1.UnsafeAddr())).Elem()
}
f2 := v2copy.Field(i)
if !f2.CanInterface() {
f2 = reflect.NewAt(f2.Type(), unsafe.Pointer(f2.UnsafeAddr())).Elem()
}
if !deepValueEqual(f1, f2, visited, depth+1) {
return false
}
}
return true
case reflect.Map:
if v1.IsNil() != v2.IsNil() {
return false
}
if v1.Len() != v2.Len() {
return false
}
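// Same underlying map, so the contents must be equal.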
if v1.Pointer() == v2.Pointer() {
return true
}
for _, k := range v1.MapKeys() {
val1 := v1.MapIndex(k)
val2 := v2.MapIndex(k)
if !val1.IsValid() || !val2.IsValid() || !deepValueEqual(val1, val2, visited, depth+1) {
return false
}
}
return true
case reflect.Func:
if v1.IsNil() && v2.IsNil() {
return true
}
// Can't do better than this:
return false
default:
// Normal equality suffices
// This differs from reflect.DeepEqual, which relies on unexported helpers in
// the reflect package to read unexported fields. Because of the unsafe.Pointer
// logic above, values derived from unexported fields are safe to pass to
// Interface(), which would otherwise panic.
return v1.Interface() == v2.Interface()
}
}
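
// Illustrative sketch (not part of the original file): a type with unexported
// fields and its own Equal method, driven through deepValueEqual directly. The
// names equalExample and exampleDeepEqual are hypothetical; the package's
// exported entry point may differ and would normally build the visited map.
type equalExample struct {
	id    int
	label string // ignored by Equal below
}

// Equal reports equality by id only. deepValueEqual finds this method via
// reflect.PtrTo and prefers it over the field-by-field comparison.
func (e *equalExample) Equal(other *equalExample) bool {
	return other != nil && e.id == other.id
}

func exampleDeepEqual() bool {
	a := equalExample{id: 1, label: "a"}
	b := equalExample{id: 1, label: "b"}
	// Returns true: the Equal method above is called and only ids are compared.
	return deepValueEqual(reflect.ValueOf(a), reflect.ValueOf(b), make(map[visit]bool), 0)
}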