   /*
    * Copyright 2011-2014 by the original author(s).
    *
    * Licensed under the Apache License, Version 2.0 (the "License");
    * you may not use this file except in compliance with the License.
    * You may obtain a copy of the License at
    *
    *     http://www.apache.org/licenses/LICENSE-2.0
    *
   * Unless required by applicable law or agreed to in writing, software
   * distributed under the License is distributed on an "AS IS" BASIS,
   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   * See the License for the specific language governing permissions and
   * limitations under the License.
   */
  package org.springframework.data.mongodb.core.convert;
  
  import java.util.ArrayList;
  import java.util.Arrays;
  import java.util.Collection;
  import java.util.Collections;
  import java.util.HashSet;
  import java.util.List;
  import java.util.Map;
  import java.util.Map.Entry;
  
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  import org.springframework.beans.BeansException;
  import org.springframework.context.ApplicationContext;
  import org.springframework.context.ApplicationContextAware;
  import org.springframework.core.CollectionFactory;
  import org.springframework.core.convert.ConversionException;
  import org.springframework.core.convert.ConversionService;
  import org.springframework.core.convert.support.ConversionServiceFactory;
  import org.springframework.data.mapping.context.MappingContext;
  import org.springframework.data.mapping.model.MappingException;
  import org.springframework.data.mongodb.MongoDbFactory;
  import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
  import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;
  import org.springframework.data.util.ClassTypeInformation;
  import org.springframework.data.util.TypeInformation;
  import org.springframework.expression.spel.standard.SpelExpressionParser;
  import org.springframework.util.Assert;
  import org.springframework.util.CollectionUtils;
  
  import com.mongodb.BasicDBList;
  import com.mongodb.BasicDBObject;
  import com.mongodb.DBObject;
  import com.mongodb.DBRef;

MongoConverter that uses a MappingContext to do sophisticated mapping of domain objects to DBObject.

Author(s):
Oliver Gierke
Jon Brisbin
Patryk Wasik
Thomas Darimont
Christoph Strobl
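
Before the source itself, a minimal usage sketch. The Person type, the "demo" database name and the wiring below are assumptions made for illustration only; SimpleMongoDbFactory, MongoMappingContext and DefaultDbRefResolver reflect typical 1.x-era setup, not something prescribed by this class.

	import com.mongodb.BasicDBObject;
	import com.mongodb.DBObject;
	import com.mongodb.MongoClient;

	import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
	import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver;
	import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
	import org.springframework.data.mongodb.core.mapping.MongoMappingContext;

	public class MappingMongoConverterUsage {

		static class Person {			// hypothetical domain type
			String id;
			String name;
		}

		public static void main(String[] args) throws Exception {

			SimpleMongoDbFactory factory = new SimpleMongoDbFactory(new MongoClient(), "demo");	// assumed database name
			MongoMappingContext mappingContext = new MongoMappingContext();
			mappingContext.afterPropertiesSet();

			MappingMongoConverter converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext);
			converter.afterPropertiesSet();

			Person person = new Person();
			person.id = "4711";
			person.name = "Dave";

			// domain object -> DBObject (the root document also gets a type discriminator)
			DBObject sink = new BasicDBObject();
			converter.write(person, sink);

			// DBObject -> domain object
			Person roundTripped = converter.read(Person.class, sink);
			System.out.println(roundTripped.name);
		}
	}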
  
  public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware {
  
  	protected static final Logger LOGGER = LoggerFactory.getLogger(MappingMongoConverter.class);
  
	protected final MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext;
	protected final SpelExpressionParser spelExpressionParser = new SpelExpressionParser();
  	protected final QueryMapper idMapper;
  	protected final DbRefResolver dbRefResolver;
  	protected ApplicationContext applicationContext;
  	protected boolean useFieldAccessOnly = true;
  	protected MongoTypeMapper typeMapper;
  	protected String mapKeyDotReplacement = null;
  
  	private SpELContext spELContext;

Creates a new MappingMongoConverter given the new DbRefResolver and MappingContext.

Parameters:
dbRefResolver must not be null.
mappingContext must not be null.
  
  	@SuppressWarnings("deprecation")
  	public MappingMongoConverter(DbRefResolver dbRefResolver,
			MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {

		super(ConversionServiceFactory.createDefaultConversionService());

		Assert.notNull(dbRefResolver, "DbRefResolver must not be null!");
		Assert.notNull(mappingContext, "MappingContext must not be null!");

		this.dbRefResolver = dbRefResolver;
		this.mappingContext = mappingContext;
		this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext);
		this.idMapper = new QueryMapper(this);

		this.spELContext = new SpELContext(DBObjectPropertyAccessor.INSTANCE);
	}

Creates a new MappingMongoConverter given the new MongoDbFactory and MappingContext.

Deprecated:
use the constructor taking a DbRefResolver instead.
Parameters:
mongoDbFactory must not be null.
mappingContext must not be null.
 
 	public MappingMongoConverter(MongoDbFactory mongoDbFactory,
			MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
 		this(new DefaultDbRefResolver(mongoDbFactory), mappingContext);
 	}

Configures the MongoTypeMapper to be used to add type information to DBObjects created by the converter and how to lookup type information from DBObjects when reading them. Uses a DefaultMongoTypeMapper by default. Setting this to null will reset the TypeMapper to the default one.

Parameters:
typeMapper the typeMapper to set
 
	public void setTypeMapper(MongoTypeMapper typeMapper) {
		this.typeMapper = typeMapper == null ? new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY,
				mappingContext) : typeMapper;
	}
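
As a hedged illustration of the setter above: a DefaultMongoTypeMapper can be handed in to store the type discriminator under a custom key. The "_type" key is an arbitrary example value, and the converter is assumed to be fully initialized.

	import org.springframework.data.mongodb.core.convert.DefaultMongoTypeMapper;
	import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

	class TypeMapperConfigSketch {

		// Store the type discriminator under "_type" instead of the default "_class";
		// passing null to setTypeMapper restores the default mapper.
		static void configure(MappingMongoConverter converter) {
			converter.setTypeMapper(new DefaultMongoTypeMapper("_type"));
		}
	}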
 
 	/*
 	 * (non-Javadoc)
 	 * @see org.springframework.data.mongodb.core.convert.MongoConverter#getTypeMapper()
 	 */
	public MongoTypeMapper getTypeMapper() {
		return this.typeMapper;
	}

Configure the characters that dots potentially contained in a Map key shall be replaced with. By default we don't do any translation but rather reject a Map with keys containing dots, causing the conversion of the entire object to fail. If further customization of the translation is needed, have a look at potentiallyEscapeMapKey(String) as well as potentiallyUnescapeMapKey(String).

Parameters:
mapKeyDotReplacement the mapKeyDotReplacement to set
 
 	public void setMapKeyDotReplacement(String mapKeyDotReplacement) {
		this.mapKeyDotReplacement = mapKeyDotReplacement;
 	}
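
A small sketch of the effect of this setting. The "~" replacement character and the settings map are arbitrary example values, and an initialized MappingMongoConverter is assumed to be passed in.

	import java.util.Collections;
	import java.util.Map;

	import com.mongodb.BasicDBObject;
	import com.mongodb.DBObject;

	import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

	class MapKeyDotReplacementSketch {

		// Without a replacement, a key such as "com.acme" makes the conversion fail with a MappingException;
		// with "~" configured the dot is escaped on write and translated back on read.
		static DBObject writeSettings(MappingMongoConverter converter) {

			converter.setMapKeyDotReplacement("~");

			Map<String, String> settings = Collections.singletonMap("com.acme", "enabled");
			DBObject sink = new BasicDBObject();
			converter.write(settings, sink);	// roughly { ..., "com~acme" : "enabled" }
			return sink;
		}
	}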
 
 	/*
 	 * (non-Javadoc)
 	 * @see org.springframework.data.convert.EntityConverter#getMappingContext()
 	 */
	public MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> getMappingContext() {
		return mappingContext;
	}

Configures whether to use field access only for entity mapping. Setting this to true will force the MongoConverter to not go through getters or setters even if they are present for getting and setting property values.

Parameters:
useFieldAccessOnly
 
 	public void setUseFieldAccessOnly(boolean useFieldAccessOnly) {
		this.useFieldAccessOnly = useFieldAccessOnly;
 	}
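
A brief sketch of what the flag changes. The Account type and its masking getter are made up for illustration; the point is only that with the default (field access only) such a getter is bypassed, while setting the flag to false makes accessors preferred.

	import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

	class FieldAccessSketch {

		static class Account {			// hypothetical entity
			String iban;

			// A getter that masks the value; with the default useFieldAccessOnly = true it is never
			// consulted, so the raw field value is persisted.
			public String getIban() {
				return iban == null ? null : "****" + iban.substring(Math.max(0, iban.length() - 4));
			}
		}

		static void configure(MappingMongoConverter converter) {
			// false: prefer getters/setters where present, fall back to fields otherwise
			converter.setUseFieldAccessOnly(false);
		}
	}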
 
 	/*
 	 * (non-Javadoc)
 	 * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext)
 	 */
	public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {

		this.applicationContext = applicationContext;
		this.spELContext = new SpELContext(this.spELContext, applicationContext);
 	}
 
 	/*
 	 * (non-Javadoc)
 	 * @see org.springframework.data.mongodb.core.core.MongoReader#read(java.lang.Class, com.mongodb.DBObject)
 	 */
	public <S extends Object> S read(Class<S> clazz, final DBObject dbo) {
 		return read(ClassTypeInformation.from(clazz), dbo);
 	}
 
	protected <S extends Object> S read(TypeInformation<S> type, DBObject dbo) {
		return read(type, dbo, null);
 	}
 
 	@SuppressWarnings("unchecked")
	protected <S extends Object> S read(TypeInformation<S> type, DBObject dbo, Object parent) {
 
 		if (null == dbo) {
 			return null;
 		}
 
		TypeInformation<? extends S> typeToUse = typeMapper.readType(dbo, type);
 		Class<? extends S> rawType = typeToUse.getType();
 
		if (conversions.hasCustomReadTarget(dbo.getClass(), rawType)) {
			return conversionService.convert(dbo, rawType);
 		}
 
 		if (DBObject.class.isAssignableFrom(rawType)) {
 			return (S) dbo;
 		}
 
 		if (typeToUse.isCollectionLike() && dbo instanceof BasicDBList) {
			return (S) readCollectionOrArray(typeToUse, (BasicDBList) dbo, parent);
 		}
 
 		if (typeToUse.isMap()) {
			return (S) readMap(typeToUse, dbo, parent);
 		}
 
		// Retrieve persistent entity info
		MongoPersistentEntity<S> persistentEntity = (MongoPersistentEntity<S>) mappingContext
				.getPersistentEntity(typeToUse);
 		if (persistentEntity == null) {
 			throw new MappingException("No mapping metadata found for " + rawType.getName());
 		}
 
		return read(persistentEntity, dbo, parent);
 	}
 
	private ParameterValueProvider<MongoPersistentProperty> getParameterProvider(MongoPersistentEntity<?> entity,
			DBObject source, DefaultSpELExpressionEvaluator evaluator, Object parent) {

		MongoDbPropertyValueProvider provider = new MongoDbPropertyValueProvider(source, evaluator, parent);
		PersistentEntityParameterValueProvider<MongoPersistentProperty> parameterProvider = new PersistentEntityParameterValueProvider<MongoPersistentProperty>(
				entity, provider, parent);

		return new ConverterAwareSpELExpressionParameterValueProvider(evaluator, conversionService, parameterProvider,
				parent);
 	}
 
	private <S extends Object> S read(final MongoPersistentEntity<S> entity, final DBObject dbo, final Object parent) {

		final DefaultSpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(dbo, spELContext);

		ParameterValueProvider<MongoPersistentProperty> provider = getParameterProvider(entity, dbo, evaluator, parent);
		EntityInstantiator instantiator = instantiators.getInstantiatorFor(entity);
		S instance = instantiator.createInstance(entity, provider);

		final BeanWrapper<MongoPersistentEntity<S>, S> wrapper = BeanWrapper.create(instance, conversionService);
		final S result = wrapper.getBean();

		// Set properties not already set in the constructor
		entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
			public void doWithPersistentProperty(MongoPersistentProperty prop) {

				if (!dbo.containsField(prop.getFieldName()) || entity.isConstructorArgument(prop)) {
					return;
				}

				Object obj = getValueInternal(prop, dbo, evaluator, result);
				wrapper.setProperty(prop, obj);
			}
		});

		// Handle associations
		entity.doWithAssociations(new AssociationHandler<MongoPersistentProperty>() {
			public void doWithAssociation(Association<MongoPersistentProperty> association) {

				MongoPersistentProperty inverseProp = association.getInverse();

				Object obj = dbRefResolver.resolveDbRef(inverseProp, new DbRefResolverCallback() {

					public Object resolve(MongoPersistentProperty property) {
						return getValueInternal(property, dbo, evaluator, parent);
					}
				});

				wrapper.setProperty(inverseProp, obj);
			}
		});

		return result;
 	}
 
 	/* 
 	 * (non-Javadoc)
 	 * @see org.springframework.data.mongodb.core.convert.MongoWriter#toDBRef(java.lang.Object, org.springframework.data.mongodb.core.mapping.MongoPersistentProperty)
 	 */
	public DBRef toDBRef(Object object, MongoPersistentProperty referingProperty) {

		org.springframework.data.mongodb.core.mapping.DBRef annotation = null;

		if (referingProperty != null) {
			annotation = referingProperty.getDBRef();
			Assert.isTrue(annotation != null, "The referenced property has to be mapped with @DBRef!");
		}

		return createDBRef(object, referingProperty);
 	}

Root entry method into write conversion. Adds a type discriminator to the DBObject. Shouldn't be called for nested conversions.

See also:
org.springframework.data.mongodb.core.core.convert.MongoWriter.write(java.lang.Object, com.mongodb.DBObject)
 
	public void write(final Object obj, final DBObject dbo) {

		if (null == obj) {
			return;
		}

		boolean handledByCustomConverter = conversions.getCustomWriteTarget(obj.getClass(), DBObject.class) != null;
		TypeInformation<? extends Object> type = ClassTypeInformation.from(obj.getClass());

		if (!handledByCustomConverter && !(dbo instanceof BasicDBList)) {
			typeMapper.writeType(type, dbo);
		}

		writeInternal(obj, dbo, type);
 	}
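
To illustrate the root-level behaviour described above, a sketch of the resulting document shape. The Ticket type is hypothetical, an initialized converter is assumed, and the shown JSON is only approximate.

	import com.mongodb.BasicDBObject;
	import com.mongodb.DBObject;

	import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

	class RootWriteSketch {

		static class Ticket {			// hypothetical entity
			String subject;
		}

		// The root-level write adds the type discriminator (by default under "_class").
		static DBObject write(MappingMongoConverter converter) {

			Ticket ticket = new Ticket();
			ticket.subject = "Printer on fire";

			DBObject sink = new BasicDBObject();
			converter.write(ticket, sink);
			// sink is roughly { "_class" : "...Ticket", "subject" : "Printer on fire" }
			return sink;
		}
	}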

Internal write conversion method which should be used for nested invocations.

Parameters:
obj
dbo
 
 	@SuppressWarnings("unchecked")
	protected void writeInternal(final Object obj, final DBObject dbo, final TypeInformation<?> typeHint) {

		if (null == obj) {
			return;
		}

		Class<?> customTarget = conversions.getCustomWriteTarget(obj.getClass(), DBObject.class);

		if (customTarget != null) {
			DBObject result = conversionService.convert(obj, DBObject.class);
			dbo.putAll(result);
			return;
		}

		if (Map.class.isAssignableFrom(obj.getClass())) {
			writeMapInternal((Map<Object, Object>) obj, dbo, ClassTypeInformation.MAP);
			return;
		}

		if (Collection.class.isAssignableFrom(obj.getClass())) {
			writeCollectionInternal((Collection<?>) obj, ClassTypeInformation.LIST, (BasicDBList) dbo);
			return;
		}

		MongoPersistentEntity<?> entity = mappingContext.getPersistentEntity(obj.getClass());
		writeInternal(obj, dbo, entity);
		addCustomTypeKeyIfNecessary(typeHint, obj, dbo);
 	}
 
	protected void writeInternal(Object obj, final DBObject dbo, MongoPersistentEntity<?> entity) {

		if (obj == null) {
			return;
		}

		if (null == entity) {
			throw new MappingException("No mapping metadata found for entity of type " + obj.getClass().getName());
		}

		final BeanWrapper<MongoPersistentEntity<Object>, Object> wrapper = BeanWrapper.create(obj, conversionService);
		final MongoPersistentProperty idProperty = entity.getIdProperty();

		if (!dbo.containsField("_id") && null != idProperty) {

			boolean fieldAccessOnly = idProperty.usePropertyAccess() ? false : useFieldAccessOnly;

			try {
				Object id = wrapper.getProperty(idProperty, Object.class, fieldAccessOnly);
				dbo.put("_id", idMapper.convertId(id));
			} catch (ConversionException ignored) {}
		}

		// Write the properties
		entity.doWithProperties(new PropertyHandler<MongoPersistentProperty>() {
			public void doWithPersistentProperty(MongoPersistentProperty prop) {

				if (prop.equals(idProperty)) {
					return;
				}

				boolean fieldAccessOnly = prop.usePropertyAccess() ? false : useFieldAccessOnly;

				Object propertyObj = wrapper.getProperty(prop, prop.getType(), fieldAccessOnly);

				if (null != propertyObj) {
					if (!conversions.isSimpleType(propertyObj.getClass())) {
						writePropertyInternal(propertyObj, dbo, prop);
					} else {
						writeSimpleInternal(propertyObj, dbo, prop);
					}
				}
			}
		});

		entity.doWithAssociations(new AssociationHandler<MongoPersistentProperty>() {
			public void doWithAssociation(Association<MongoPersistentProperty> association) {
				MongoPersistentProperty inverseProp = association.getInverse();
				Class<?> type = inverseProp.getType();
				Object propertyObj = wrapper.getProperty(inverseProp, type);
				if (null != propertyObj) {
					writePropertyInternal(propertyObj, dbo, inverseProp);
				}
			}
		});
 	}
 
 	@SuppressWarnings({ "unchecked" })
	protected void writePropertyInternal(Object obj, DBObject dbo, MongoPersistentProperty prop) {
 
 		if (obj == null) {
 			return;
 		}
 
 		DBObjectAccessor accessor = new DBObjectAccessor(dbo);
 
 		TypeInformation<?> valueType = ClassTypeInformation.from(obj.getClass());
 		TypeInformation<?> type = prop.getTypeInformation();
 
 		if (valueType.isCollectionLike()) {
 			DBObject collectionInternal = createCollection(asCollection(obj), prop);
			accessor.put(prop, collectionInternal);
 			return;
 		}
 
 		if (valueType.isMap()) {
			DBObject mapDbObj = createMap((Map<Object, Object>) obj, prop);
			accessor.put(prop, mapDbObj);
 			return;
 		}
 
 		if (prop.isDbReference()) {
			DBRef dbRefObj = createDBRef(obj, prop);
			if (null != dbRefObj) {
				accessor.put(prop, dbRefObj);
 				return;
 			}
 		}
 
 		// Lookup potential custom target type
		Class<?> basicTargetType = conversions.getCustomWriteTarget(obj.getClass(), null);
 
 		if (basicTargetType != null) {
			accessor.put(prop, conversionService.convert(obj, basicTargetType));
 			return;
 		}
 
		Object existingValue = accessor.get(prop);
		BasicDBObject propDbObj = existingValue instanceof BasicDBObject ? (BasicDBObject) existingValue
				: new BasicDBObject();
		addCustomTypeKeyIfNecessary(type, obj, propDbObj);

		MongoPersistentEntity<?> entity = isSubtype(prop.getType(), obj.getClass()) ? mappingContext
				.getPersistentEntity(obj.getClass()) : mappingContext.getPersistentEntity(type);

		writeInternal(obj, propDbObj, entity);
		accessor.put(prop, propDbObj);
 	}
 
	private boolean isSubtype(Class<?> left, Class<?> right) {
 		return left.isAssignableFrom(right) && !left.equals(right);
 	}

Returns the given object as a Collection. Will return the Collection as is if the source is a Collection already, will convert an array into a Collection, or simply create a single-element collection for everything else.

Parameters:
source
Returns:
 
 	private static Collection<?> asCollection(Object source) {
 
 		if (source instanceof Collection) {
 			return (Collection<?>) source;
 		}
 
 		return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source);
 	}

Writes the given Collection using the given MongoPersistentProperty information.

Parameters:
collection must not be null.
property must not be null.
Returns:
 
	protected DBObject createCollection(Collection<?> collection, MongoPersistentProperty property) {
 
 		if (!property.isDbReference()) {
			return writeCollectionInternal(collection, property.getTypeInformation(), new BasicDBList());
 		}
 
 		BasicDBList dbList = new BasicDBList();
 
 		for (Object element : collection) {
 
 			if (element == null) {
 				continue;
 			}
 
			DBRef dbRef = createDBRef(element, property);
 			dbList.add(dbRef);
 		}
 
 		return dbList;
 	}

Writes the given Map using the given MongoPersistentProperty information.

Parameters:
map must not be null.
property must not be null.
Returns:
 
	protected DBObject createMap(Map<Object, Object> map, MongoPersistentProperty property) {

		Assert.notNull(map, "Given map must not be null!");
		Assert.notNull(property, "PersistentProperty must not be null!");

		if (!property.isDbReference()) {
			return writeMapInternal(map, new BasicDBObject(), property.getTypeInformation());
		}

		BasicDBObject dbObject = new BasicDBObject();

		for (Map.Entry<Object, Object> entry : map.entrySet()) {

			Object key = entry.getKey();
			Object value = entry.getValue();

			if (conversions.isSimpleType(key.getClass())) {

				String simpleKey = potentiallyEscapeMapKey(key.toString());
				dbObject.put(simpleKey, value != null ? createDBRef(value, property) : null);
 
 			} else {
 				throw new MappingException("Cannot use a complex object as a key value.");
 			}
 		}
 
 		return dbObject;
 	}

Populates the given BasicDBList with values from the given Collection.

Parameters:
source the collection to create a BasicDBList for, must not be null.
type the TypeInformation to consider or null if unknown.
sink the BasicDBList to write to.
Returns:
 
	private BasicDBList writeCollectionInternal(Collection<?> source, TypeInformation<?> type, BasicDBList sink) {
 
 		TypeInformation<?> componentType = type == null ? null : type.getComponentType();
 
 		for (Object element : source) {
 
 			Class<?> elementType = element == null ? null : element.getClass();
 
			if (elementType == null || conversions.isSimpleType(elementType)) {
				sink.add(getPotentiallyConvertedSimpleWrite(element));
			} else if (element instanceof Collection || elementType.isArray()) {
				sink.add(writeCollectionInternal(asCollection(element), componentType, new BasicDBList()));
			} else {
				BasicDBObject propDbObj = new BasicDBObject();
				writeInternal(element, propDbObj, componentType);
 				sink.add(propDbObj);
 			}
 		}
 
 		return sink;
 	}

Writes the given Map to the given DBObject considering the given TypeInformation.

Parameters:
obj must not be null.
dbo must not be null.
propertyType must not be null.
Returns:
 
	protected DBObject writeMapInternal(Map<Object, Object> obj, DBObject dbo, TypeInformation<?> propertyType) {

		for (Map.Entry<Object, Object> entry : obj.entrySet()) {

			Object key = entry.getKey();
			Object val = entry.getValue();

			if (conversions.isSimpleType(key.getClass())) {
				// Don't use conversion service here as removal of ObjectToString converter results in some primitive types not
				// being convertable
				String simpleKey = potentiallyEscapeMapKey(key.toString());
				if (val == null || conversions.isSimpleType(val.getClass())) {
					writeSimpleInternal(val, dbo, simpleKey);
				} else if (val instanceof Collection || val.getClass().isArray()) {
					dbo.put(simpleKey,
							writeCollectionInternal(asCollection(val), propertyType.getMapValueType(), new BasicDBList()));
				} else {
					DBObject newDbo = new BasicDBObject();
					TypeInformation<?> valueTypeInfo = propertyType.isMap() ? propertyType.getMapValueType()
							: ClassTypeInformation.OBJECT;
					writeInternal(val, newDbo, valueTypeInfo);
					dbo.put(simpleKey, newDbo);
 				}
 			} else {
 				throw new MappingException("Cannot use a complex object as a key value.");
 			}
 		}
 
 		return dbo;
 	}

Potentially replaces dots in the given map key with the configured map key replacement, or aborts the conversion if no replacement is configured.

Parameters:
source
Returns:
See also:
setMapKeyDotReplacement(String)
 
 	protected String potentiallyEscapeMapKey(String source) {
 
 		if (!source.contains(".")) {
 			return source;
 		}
 
		if (mapKeyDotReplacement == null) {
			throw new MappingException(String.format("Map key %s contains dots but no replacement was configured! Make "
					+ "sure map keys don't contain dots in the first place or configure an appropriate replacement!", source));
		}

		return source.replaceAll("\\.", mapKeyDotReplacement);
 	}

Translates the map key replacements in the given key just read with a dot in case a map key replacement has been configured.

Parameters:
source
Returns:
 
 	protected String potentiallyUnescapeMapKey(String source) {
		return mapKeyDotReplacement == null ? source : source.replaceAll(mapKeyDotReplacement, "\\.");
 	}

Adds custom type information to the given DBObject if necessary. That is, if the value's type is not the same as the type given. This is usually the case if you store a subtype of the actual declared type of the property.

Parameters:
type
value must not be null.
dbObject must not be null.
 
	protected void addCustomTypeKeyIfNecessary(TypeInformation<?> type, Object value, DBObject dbObject) {
 
 		TypeInformation<?> actualType = type != null ? type.getActualType() : null;
 		Class<?> reference = actualType == null ? Object.class : actualType.getType();
 
 		boolean notTheSameClass = !value.getClass().equals(reference);
 		if (notTheSameClass) {
			typeMapper.writeType(value.getClass(), dbObject);
 		}
 	}
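
A sketch of the situation this method targets: a property declared with a base type but holding a subtype instance. All domain types below are hypothetical, an initialized converter is assumed, and the shown JSON is approximate.

	import com.mongodb.BasicDBObject;
	import com.mongodb.DBObject;

	import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

	class SubtypeDiscriminatorSketch {

		static class Contact { }			// hypothetical base type
		static class Person extends Contact {		// hypothetical subtype
			String name;
		}
		static class AddressBook {			// hypothetical entity
			Contact owner;				// declared as the base type
		}

		// Because 'owner' holds a Person although it is declared as Contact, the nested document
		// gets a type key so that reads can instantiate the correct subtype again.
		static DBObject write(MappingMongoConverter converter) {

			Person person = new Person();
			person.name = "Carter";

			AddressBook book = new AddressBook();
			book.owner = person;

			DBObject sink = new BasicDBObject();
			converter.write(book, sink);
			// sink.get("owner") is roughly { "_class" : "...Person", "name" : "Carter" }
			return sink;
		}
	}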

Writes the given simple value to the given DBObject. Will store enum names for enum values.

Parameters:
value
dbObject must not be null.
key must not be null.
 
	private void writeSimpleInternal(Object value, DBObject dbObject, String key) {
		dbObject.put(key, getPotentiallyConvertedSimpleWrite(value));
 	}
 
	private void writeSimpleInternal(Object value, DBObject dbObject, MongoPersistentProperty property) {
		DBObjectAccessor accessor = new DBObjectAccessor(dbObject);
		accessor.put(property, getPotentiallyConvertedSimpleWrite(value));
 	}

Checks whether we have a custom conversion registered for the given value into an arbitrary simple Mongo type. Returns the converted value if so. If not, we perform special enum handling or simply return the value as is.

Parameters:
value
Returns:
 
 
	private Object getPotentiallyConvertedSimpleWrite(Object value) {

		if (value == null) {
 			return null;
 		}
 
		Class<?> customTarget = conversions.getCustomWriteTarget(value.getClass(), null);

		if (customTarget != null) {
			return conversionService.convert(value, customTarget);
 		} else {
 			return Enum.class.isAssignableFrom(value.getClass()) ? ((Enum<?>) value).name() : value;
 		}
 	}
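
A sketch of the enum handling mentioned above; the Status enum and Issue type are made up for the example, and an initialized converter is assumed.

	import com.mongodb.BasicDBObject;
	import com.mongodb.DBObject;

	import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

	class EnumHandlingSketch {

		enum Status { OPEN, CLOSED }

		static class Issue {			// hypothetical entity
			Status status;
		}

		// Unless a custom converter is registered for Status, the enum is written by name ("OPEN")
		// and converted back via Enum.valueOf on the reading side.
		static Issue roundTrip(MappingMongoConverter converter) {

			Issue issue = new Issue();
			issue.status = Status.OPEN;

			DBObject sink = new BasicDBObject();
			converter.write(issue, sink);		// roughly { "_class" : "...Issue", "status" : "OPEN" }

			return converter.read(Issue.class, sink);
		}
	}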

Checks whether we have a custom conversion for the given simple object. Converts the given value if so, applies Enum handling or returns the value as is.

Parameters:
value
target must not be null.
Returns:
 
 	@SuppressWarnings({ "rawtypes""unchecked" })
 	private Object getPotentiallyConvertedSimpleRead(Object valueClass<?> target) {
 
 		if (value == null || target == null) {
 			return value;
 		}
 
		if (conversions.hasCustomReadTarget(value.getClass(), target)) {
			return conversionService.convert(value, target);
 		}
 
 		if (Enum.class.isAssignableFrom(target)) {
			return Enum.valueOf((Class<Enum>) target, value.toString());
 		}
 
		return target.isAssignableFrom(value.getClass()) ? value : conversionService.convert(value, target);
 	}
 
	protected DBRef createDBRef(Object target, MongoPersistentProperty property) {
 
 		Assert.notNull(target);
 
 		if (target instanceof DBRef) {
			return (DBRef) target;
 		}
 
		MongoPersistentEntity<?> targetEntity = mappingContext.getPersistentEntity(target.getClass());
		targetEntity = targetEntity == null ? mappingContext.getPersistentEntity(property) : targetEntity;
 
 		if (null == targetEntity) {
 			throw new MappingException("No mapping metadata found for " + target.getClass());
 		}
 
 		MongoPersistentProperty idProperty = targetEntity.getIdProperty();
 
 		if (idProperty == null) {
 			throw new MappingException("No id property found on class " + targetEntity.getType());
 		}
 
 		Object id = null;
 
 		if (target.getClass().equals(idProperty.getType())) {
 			id = target;
 		} else {
			BeanWrapper<MongoPersistentEntity<Object>, Object> wrapper = BeanWrapper.create(target, conversionService);
			id = wrapper.getProperty(idProperty, Object.class);
 		}
 
 		if (null == id) {
 			throw new MappingException("Cannot create a reference to an object with a NULL id.");
 		}
 
		return dbRefResolver.createDbRef(property == null ? null : property.getDBRef(), targetEntity,
				idMapper.convertId(id));
 	}
 
	protected Object getValueInternal(MongoPersistentProperty prop, DBObject dbo, SpELExpressionEvaluator evaluator,
			Object parent) {

		MongoDbPropertyValueProvider provider = new MongoDbPropertyValueProvider(dbo, evaluator, parent);
		return provider.getPropertyValue(prop);
 	}

Reads the given BasicDBList into a collection of the given TypeInformation.

Parameters:
targetType must not be null.
sourceValue must not be null.
Returns:
the converted Collection or array, will never be null.
 
 	@SuppressWarnings("unchecked")
	private Object readCollectionOrArray(TypeInformation<?> targetType, BasicDBList sourceValue, Object parent) {
 
 		Assert.notNull(targetType);
 
 		Class<?> collectionType = targetType.getType();
 
 		if (sourceValue.isEmpty()) {
 			return getPotentiallyConvertedSimpleRead(new HashSet<Object>(), collectionType);
 		}
 
 		collectionType = Collection.class.isAssignableFrom(collectionType) ? collectionType : List.class;
 
		Collection<Object> items = targetType.getType().isArray() ? new ArrayList<Object>() : CollectionFactory
				.createCollection(collectionType, sourceValue.size());
 		TypeInformation<?> componentType = targetType.getComponentType();
 		Class<?> rawComponentType = componentType == null ? null : componentType.getType();
 
 		for (int i = 0; i < sourceValue.size(); i++) {
 
 			Object dbObjItem = sourceValue.get(i);
 
 			if (dbObjItem instanceof DBRef) {
				items.add(DBRef.class.equals(rawComponentType) ? dbObjItem : read(componentType, readRef((DBRef) dbObjItem),
						parent));
 			} else if (dbObjItem instanceof DBObject) {
				items.add(read(componentType, (DBObject) dbObjItem, parent));
 			} else {
				items.add(getPotentiallyConvertedSimpleRead(dbObjItem, rawComponentType));
 			}
 		}
 
		return getPotentiallyConvertedSimpleRead(items, targetType.getType());
 	}

Reads the given DBObject into a Map. Will recursively resolve nested Maps as well.

Parameters:
type the Map TypeInformation to be used to unmarshall this DBObject.
dbObject
Returns:
 
 	@SuppressWarnings("unchecked")
	protected Map<Object, Object> readMap(TypeInformation<?> type, DBObject dbObject, Object parent) {

		Assert.notNull(dbObject);

		Class<?> mapType = typeMapper.readType(dbObject, type).getType();
		Map<Object, Object> map = CollectionFactory.createMap(mapType, dbObject.keySet().size());
		Map<String, Object> sourceMap = dbObject.toMap();

		for (Entry<String, Object> entry : sourceMap.entrySet()) {
			if (typeMapper.isTypeKey(entry.getKey())) {
				continue;
			}

			Object key = potentiallyUnescapeMapKey(entry.getKey());

			TypeInformation<?> keyTypeInformation = type.getComponentType();
			if (keyTypeInformation != null) {
				Class<?> keyType = keyTypeInformation.getType();
				key = conversionService.convert(key, keyType);
 			}
 
 			Object value = entry.getValue();
 			TypeInformation<?> valueType = type.getMapValueType();
 			Class<?> rawValueType = valueType == null ? null : valueType.getType();
 
 			if (value instanceof DBObject) {
				map.put(key, read(valueType, (DBObject) value, parent));
 			} else if (value instanceof DBRef) {
				map.put(key, DBRef.class.equals(rawValueType) ? value : read(valueType, readRef((DBRef) value)));
 			} else {
 				Class<?> valueClass = valueType == null ? null : valueType.getType();
				map.put(key, getPotentiallyConvertedSimpleRead(value, valueClass));
 			}
 		}
 
 		return map;
 	}
 
	protected <T> List<?> unwrapList(BasicDBList dbList, TypeInformation<T> targetType) {
		List<Object> rootList = new ArrayList<Object>();
 		for (int i = 0; i < dbList.size(); i++) {
 			Object obj = dbList.get(i);
 			if (obj instanceof BasicDBList) {
				rootList.add(unwrapList((BasicDBList) obj, targetType.getComponentType()));
 			} else if (obj instanceof DBObject) {
				rootList.add(read(targetType, (DBObject) obj));
 			} else {
 				rootList.add(obj);
 			}
 		}
 		return rootList;
 	}
 
 	/*
 	 * (non-Javadoc)
 	 * @see org.springframework.data.mongodb.core.convert.MongoWriter#convertToMongoType(java.lang.Object, org.springframework.data.util.TypeInformation)
 	 */
 	@SuppressWarnings("unchecked")
	public Object convertToMongoType(Object obj, TypeInformation<?> typeInformation) {
 
 		if (obj == null) {
 			return null;
 		}
 
		Class<?> target = conversions.getCustomWriteTarget(obj.getClass());

		if (target != null) {
			return conversionService.convert(obj, target);
		}

		if (conversions.isSimpleType(obj.getClass())) {
			// Doesn't need conversion
			return getPotentiallyConvertedSimpleWrite(obj);
		}

		TypeInformation<?> typeHint = typeInformation == null ? null : typeInformation.getActualType();
 
 		if (obj instanceof BasicDBList) {
			return maybeConvertList((BasicDBList) obj, typeHint);
 		}
 
 		if (obj instanceof DBObject) {
 			DBObject newValueDbo = new BasicDBObject();
			for (String vk : ((DBObject) obj).keySet()) {
				Object o = ((DBObject) obj).get(vk);
				newValueDbo.put(vk, convertToMongoType(o, typeHint));
 			}
 			return newValueDbo;
 		}
 
 		if (obj instanceof Map) {
 			DBObject result = new BasicDBObject();
 			for (Map.Entry<ObjectObjectentry : ((Map<ObjectObject>) obj).entrySet()) {
 				result.put(entry.getKey().toString(), convertToMongoType(entry.getValue(), typeHint));
 			}
 			return result;
 		}
 
 		if (obj.getClass().isArray()) {
 			return maybeConvertList(Arrays.asList((Object[]) obj), typeHint);
 		}
 
 		if (obj instanceof Collection) {
			return maybeConvertList((Collection<?>) obj, typeHint);
 		}
 
 		DBObject newDbo = new BasicDBObject();
		this.write(obj, newDbo);
 
 		if (typeInformation == null) {
 			return removeTypeInfoRecursively(newDbo);
 		}
 
 		return !obj.getClass().equals(typeInformation.getType()) ? newDbo : removeTypeInfoRecursively(newDbo);
 	}
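
A sketch of convertToMongoType from a caller's perspective. The Tag type is hypothetical, an initialized converter is assumed, and the inline comments describe the expected, approximate results.

	import java.util.Arrays;

	import com.mongodb.DBObject;

	import org.springframework.data.mongodb.core.convert.MappingMongoConverter;

	class ConvertToMongoTypeSketch {

		static class Tag {			// hypothetical value type
			String name;
		}

		// Simple values pass through (or hit a registered converter), collections are converted element by
		// element, and complex objects become DBObjects; with a null type hint the type key is stripped again.
		static void convert(MappingMongoConverter converter) {

			Object text = converter.convertToMongoType("some text", null);			// stays a String

			Object list = converter.convertToMongoType(Arrays.asList(1, 2, 3), null);	// becomes a BasicDBList

			Tag tag = new Tag();
			tag.name = "mongodb";
			DBObject dbo = (DBObject) converter.convertToMongoType(tag, null);		// roughly { "name" : "mongodb" }

			System.out.println(text + " / " + list + " / " + dbo);
		}
	}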
 
	public BasicDBList maybeConvertList(Iterable<?> source, TypeInformation<?> typeInformation) {
 
 		BasicDBList newDbl = new BasicDBList();
 		for (Object element : source) {
			newDbl.add(convertToMongoType(element, typeInformation));
 		}
 
 		return newDbl;
 	}

Removes the type information from the conversion result.

Parameters:
object
Returns:
 
 	private Object removeTypeInfoRecursively(Object object) {
 
 		if (!(object instanceof DBObject)) {
 			return object;
 		}
 
		DBObject dbObject = (DBObject) object;
 		String keyToRemove = null;
 		for (String key : dbObject.keySet()) {
 
			if (typeMapper.isTypeKey(key)) {
 				keyToRemove = key;
 			}
 
			Object value = dbObject.get(key);
			if (value instanceof BasicDBList) {
				for (Object element : (BasicDBList) value) {
					removeTypeInfoRecursively(element);
				}
			} else {
				removeTypeInfoRecursively(value);
			}
 		}
 
 		if (keyToRemove != null) {
 			dbObject.removeField(keyToRemove);
 		}
 
 		return dbObject;
	}

	private class MongoDbPropertyValueProvider implements PropertyValueProvider<MongoPersistentProperty> {

		private final DBObjectAccessor source;
		private final SpELExpressionEvaluator evaluator;
		private final Object parent;

		public MongoDbPropertyValueProvider(DBObject source, SpELContext factory, Object parent) {
			this(source, new DefaultSpELExpressionEvaluator(source, factory), parent);
		}

		public MongoDbPropertyValueProvider(DBObject source, DefaultSpELExpressionEvaluator evaluator, Object parent) {

			Assert.notNull(source);
			Assert.notNull(evaluator);

			this.source = new DBObjectAccessor(source);
			this.evaluator = evaluator;
			this.parent = parent;
		}

		/* 
		 * (non-Javadoc)
		 * @see org.springframework.data.convert.PropertyValueProvider#getPropertyValue(org.springframework.data.mapping.PersistentProperty)
		 */
		public <T> T getPropertyValue(MongoPersistentProperty property) {

			String expression = property.getSpelExpression();
			Object value = expression != null ? evaluator.evaluate(expression) : source.get(property);

			if (value == null) {
				return null;
			}

			return readValue(value, property.getTypeInformation(), parent);
		}
	}

Extension of SpELExpressionParameterValueProvider to recursively trigger value conversion on the raw resolved SpEL value.

Author(s):
Oliver Gierke
	private class ConverterAwareSpELExpressionParameterValueProvider extends
			SpELExpressionParameterValueProvider<MongoPersistentProperty> {

		private final Object parent;

Creates a new ConverterAwareSpELExpressionParameterValueProvider.

Parameters:
evaluator must not be null.
conversionService must not be null.
delegate must not be null.
		public ConverterAwareSpELExpressionParameterValueProvider(SpELExpressionEvaluator evaluator,
				ConversionService conversionService, ParameterValueProvider<MongoPersistentProperty> delegate, Object parent) {

			super(evaluator, conversionService, delegate);
			this.parent = parent;
		}
		/* 
		 * (non-Javadoc)
		 * @see org.springframework.data.mapping.model.SpELExpressionParameterValueProvider#potentiallyConvertSpelValue(java.lang.Object, org.springframework.data.mapping.PreferredConstructor.Parameter)
		 */
		protected <T> T potentiallyConvertSpelValue(Object object, Parameter<T, MongoPersistentProperty> parameter) {
			return readValue(object, parameter.getType(), parent);
		}
	}
	@SuppressWarnings("unchecked")
	private <T> T readValue(Object valueTypeInformation<?> typeObject parent) {
		Class<?> rawType = type.getType();
		if (.hasCustomReadTarget(value.getClass(), rawType)) {
			return (T) .convert(valuerawType);
else if (value instanceof DBRef) {
			return (T) (rawType.equals(DBRef.class) ? value : read(typereadRef((DBRefvalue), parent));
else if (value instanceof BasicDBList) {
			return (T) readCollectionOrArray(type, (BasicDBListvalueparent);
else if (value instanceof DBObject) {
			return (T) read(type, (DBObjectvalueparent);
else {
			return (T) getPotentiallyConvertedSimpleRead(valuerawType);
	}

Performs the fetch operation for the given DBRef.

Parameters:
ref
Returns:
	DBObject readRef(DBRef ref) {
		return ref.fetch();
	}
}