From d1340ebb9a6b45319d8fa35a24cb0ece22f7b6ce Mon Sep 17 00:00:00 2001 From: ivolucas Date: Mon, 27 Mar 2017 12:07:45 +0100 Subject: [PATCH 1/5] Add a Transcoder that uses java serializer + Gzip --- javagzip-serializer/README | 1 + javagzip-serializer/pom.xml | 69 +++++ .../javalgzip/JavaGzipTranscoder.java | 269 ++++++++++++++++++ .../javalgzip/JavaGzipTranscoderFactory.java | 111 ++++++++ 4 files changed, 450 insertions(+) create mode 100644 javagzip-serializer/README create mode 100644 javagzip-serializer/pom.xml create mode 100644 javagzip-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalgzip/JavaGzipTranscoder.java create mode 100644 javagzip-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalgzip/JavaGzipTranscoderFactory.java diff --git a/javagzip-serializer/README b/javagzip-serializer/README new file mode 100644 index 00000000..b5ad8607 --- /dev/null +++ b/javagzip-serializer/README @@ -0,0 +1 @@ +A serialization strategy for the memcached-session-manager (http://code.google.com/p/memcached-session-manager ) that uses native Java Serialization and uses GZIP to reduce the size of the session diff --git a/javagzip-serializer/pom.xml b/javagzip-serializer/pom.xml new file mode 100644 index 00000000..5ba4a4c3 --- /dev/null +++ b/javagzip-serializer/pom.xml @@ -0,0 +1,69 @@ + + 4.0.0 + + de.javakaffee.msm + memcached-session-manager-project + 2.1.2-SNAPSHOT + + + de.javakaffee.msm + msm-javagzip-serializer + memcached-session-manager javagzip-serializer + 2.1.2-SNAPSHOT + jar + + + + de.javakaffee.msm + memcached-session-manager + ${project.version} + provided + + + org.apache.tomcat + juli + ${tomcat-version} + provided + + + org.apache.tomcat + coyote + ${tomcat-version} + provided + + + org.apache.tomcat + catalina + ${tomcat-version} + provided + + + + + + + + + de.javakaffee.msm + memcached-session-manager + ${project.version} + test-jar + test + + + de.javakaffee.msm + memcached-session-manager-tc7 + ${project.version} + test + + + commons-lang + commons-lang + 2.6 + test + + + + + + diff --git a/javagzip-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalgzip/JavaGzipTranscoder.java b/javagzip-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalgzip/JavaGzipTranscoder.java new file mode 100644 index 00000000..e205fc3c --- /dev/null +++ b/javagzip-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalgzip/JavaGzipTranscoder.java @@ -0,0 +1,269 @@ +package de.javakaffee.web.msm.serializer.javalgzip; + +import de.javakaffee.web.msm.JavaSerializationTranscoder; +import de.javakaffee.web.msm.MemcachedBackupSession; +import de.javakaffee.web.msm.SessionAttributesTranscoder; +import de.javakaffee.web.msm.TranscoderDeserializationException; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.NotSerializableException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.OutputStream; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.ConcurrentModificationException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; + +import org.apache.catalina.session.StandardSession; +import org.apache.catalina.util.CustomObjectInputStream; +import org.apache.juli.logging.Log; +import org.apache.juli.logging.LogFactory; + 
+/** + * A {@link SessionAttributesTranscoder} that serializes catalina + * {@link StandardSession}s using java serialization (and the serialization + * logic of {@link StandardSession} as found in + * {@link StandardSession#writeObjectData(ObjectOutputStream)} and + * {@link StandardSession#readObjectData(ObjectInputStream)}). It is custom + * implementation of {@link JavaSerializationTranscoder} that uses + * {@link GZIPInputStream} and {@link GZIPOutputStream} to compress and + * decompress the session. + * + * @author ilucas + */ +public class JavaGzipTranscoder implements SessionAttributesTranscoder { + + private static final Log LOG = LogFactory.getLog(JavaGzipTranscoder.class); + + private static final String EMPTY_ARRAY[] = new String[0]; + + private final boolean retryActive; + private final int retryNumber; + private final int retryInterval; + /** + * The dummy attribute value serialized when a NotSerializableException is + * encountered in writeObject(). + */ + protected static final String NOT_SERIALIZED = "___NOT_SERIALIZABLE_EXCEPTION___"; + + private final ClassLoader classLoader; + + /** + * Constructor. + * + * @param manager the manager + */ + public JavaGzipTranscoder() { + this(null, false, 1, 0); + } + + /** + * Constructor. + * + * @param classLoader + * @param compresser + * @param retryActive + * @param retryNumber + * @param retryInterval + */ + public JavaGzipTranscoder(ClassLoader classLoader, boolean retryActive, int retryNumber, int retryInterval) { + this.retryActive = retryActive; + this.retryNumber = retryNumber; + this.retryInterval = retryInterval; + this.classLoader = classLoader; + + } + + /** + * {@inheritDoc} + */ + @Override + public byte[] serializeAttributes(final MemcachedBackupSession session, final ConcurrentMap attributes) { + if (attributes == null) { + throw new NullPointerException("Can't serialize null"); + } + if (retryActive) { + int i = 0; + ConcurrentModificationException concurrentModificationException = null; + do { + try { + return writeAttributes(session, attributes); + } catch (ConcurrentModificationException ex) { + concurrentModificationException = ex; + LOG.warn("ConcurrentModificationException on write attributes of session " + session.getIdInternal() + " retry " + i); + if (retryInterval > 0) { + try { + Thread.sleep(retryInterval); + } catch (InterruptedException interruptedException) { + LOG.debug("Interrupted on wating for try to writeAttributes: " + interruptedException); + } + } + } + } while (i++ < retryNumber); + throw concurrentModificationException; + } else { + + return writeAttributes(session, attributes); + + } + } + + private byte[] writeAttributes(final MemcachedBackupSession session, final ConcurrentMap attributes) throws IllegalArgumentException { + ByteArrayOutputStream bos = null; + OutputStream gzs = null; + ObjectOutputStream oos = null; + try { + bos = new ByteArrayOutputStream(); + gzs = new GZIPOutputStream(bos); + oos = new ObjectOutputStream(gzs); + writeAttributes(session, attributes, oos); + closeSilently(oos); + closeSilently(gzs); + return bos.toByteArray(); + } catch (final IOException e) { + throw new IllegalArgumentException("Non-serializable object", e); + } finally { + closeSilently(oos); + closeSilently(gzs); + closeSilently(bos); + } + } + + private void writeAttributes(final MemcachedBackupSession session, final Map attributes, + final ObjectOutputStream oos) throws IOException { + + // Accumulate the names of serializable and non-serializable attributes + final String keys[] = 
attributes.keySet().toArray(EMPTY_ARRAY); + final List saveNames = new ArrayList(); + final List saveValues = new ArrayList(); + for (int i = 0; i < keys.length; i++) { + final Object value = attributes.get(keys[i]); + if (value == null) { + continue; + } else if (value instanceof Serializable) { + saveNames.add(keys[i]); + saveValues.add(value); + } else { + if (LOG.isDebugEnabled()) { + LOG.debug("Ignoring attribute '" + keys[i] + "' as it does not implement Serializable"); + } + } + } + + // Serialize the attribute count and the Serializable attributes + final int n = saveNames.size(); + oos.writeObject(Integer.valueOf(n)); + for (int i = 0; i < n; i++) { + oos.writeObject(saveNames.get(i)); + if (LOG.isDebugEnabled()) { + LOG.debug(" start storing attribute '" + saveNames.get(i) + "'"); + } + try { + oos.writeObject(saveValues.get(i)); + if (LOG.isDebugEnabled()) { + LOG.debug(" stored attribute '" + saveNames.get(i) + "' with value '" + saveValues.get(i) + "'"); + } + } catch (final NotSerializableException e) { + LOG.warn("Attribute " + saveNames.get(i) + " of session " + session.getIdInternal() + " is not serializable", e); + oos.writeObject(NOT_SERIALIZED); + if (LOG.isDebugEnabled()) { + LOG.debug(" storing attribute '" + saveNames.get(i) + "' with value NOT_SERIALIZED"); + } + } catch (final ConcurrentModificationException concurrentModificationException) { + // throw to catch and retry + throw concurrentModificationException; + } catch (final Exception e) { + LOG.warn("Attribute " + saveNames.get(i) + " of session " + session.getIdInternal() + " was not serialized correctly", e); + throw new IOException("Attribute " + saveNames.get(i) + " of session " + session.getIdInternal() + " was not serialized correctly, stream invalidated", e); + } + + } + + } + + /** + * Get the object represented by the given serialized bytes. 
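+     * The given bytes are expected to hold a GZIP compressed java serialization
+     * stream as produced by {@code serializeAttributes}; the data is wrapped in a
+     * {@link GZIPInputStream} before the attributes are read back.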
+ * + * @param in the bytes to deserialize + * @return the resulting object + */ + @Override + public ConcurrentMap deserializeAttributes(final byte[] in) { + ByteArrayInputStream bis = null; + InputStream gzs = null; + ObjectInputStream ois = null; + try { + bis = new ByteArrayInputStream(in); + gzs = new GZIPInputStream(bis); + ois = createObjectInputStream(gzs); + + final ConcurrentMap attributes = new ConcurrentHashMap(); + final int n = ((Integer) ois.readObject()).intValue(); + for (int i = 0; i < n; i++) { + final String name = (String) ois.readObject(); + if (LOG.isDebugEnabled()) { + LOG.debug(" start reading attribute '" + name + "'"); + } + final Object value = ois.readObject(); + if ((value instanceof String) && (value.equals(NOT_SERIALIZED))) { + continue; + } + if (LOG.isDebugEnabled()) { + LOG.debug(" loading attribute '" + name + "' with value '" + value + "' to map"); + } + attributes.put(name, value); + } + + return attributes; + } catch (final ClassNotFoundException e) { + LOG.warn("Caught CNFE decoding " + in.length + " bytes of data", e); + throw new TranscoderDeserializationException("Caught CNFE decoding data", e); + } catch (final IOException e) { + LOG.warn("Caught IOException decoding " + in.length + " bytes of data", e); + throw new TranscoderDeserializationException("Caught IOException decoding data", e); + } finally { + closeSilently(bis); + closeSilently(gzs); + closeSilently(ois); + } + } + + private ObjectInputStream createObjectInputStream(final InputStream bis) throws IOException { + final ObjectInputStream ois; + if (classLoader != null) { + ois = new CustomObjectInputStream(bis, classLoader); + } else { + ois = new ObjectInputStream(bis); + } + return ois; + } + + private void closeSilently(final OutputStream os) { + if (os != null) { + try { + os.close(); + } catch (final IOException f) { + LOG.debug("Error on closeSilently OutputStream " + os, f); + } + } + } + + private void closeSilently(final InputStream is) { + if (is != null) { + try { + is.close(); + } catch (final IOException f) { + LOG.debug("Error on closeSilently InputStream:" + is, f); + } + } + } + +} diff --git a/javagzip-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalgzip/JavaGzipTranscoderFactory.java b/javagzip-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalgzip/JavaGzipTranscoderFactory.java new file mode 100644 index 00000000..be75bfc7 --- /dev/null +++ b/javagzip-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalgzip/JavaGzipTranscoderFactory.java @@ -0,0 +1,111 @@ +package de.javakaffee.web.msm.serializer.javalgzip; + +import de.javakaffee.web.msm.MemcachedSessionService; +import de.javakaffee.web.msm.SessionAttributesTranscoder; +import de.javakaffee.web.msm.TranscoderFactory; +import org.apache.juli.logging.Log; +import org.apache.juli.logging.LogFactory; + +/** +* A {@link TranscoderFactory} that creates {@link JavaGzipTranscoder} instances. 
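+ * <p>
+ * Retry handling for {@code ConcurrentModificationException}s thrown during
+ * serialization can be configured via the system properties
+ * {@code msm.JavaGzip.retryActive} (default {@code false}),
+ * {@code msm.JavaGzip.retryNumber} (default 3) and
+ * {@code msm.JavaGzip.retryInterval} (milliseconds between retries, default 0).
+ * </p>
+ *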
+ * @author ilucas + */ +public class JavaGzipTranscoderFactory implements TranscoderFactory { + + private static final Log LOG = LogFactory.getLog(JavaGzipTranscoderFactory.class ); + + /** + * Default number of tries to serialize attributes if a + * ConcurrentModificationException is thrown + */ + private static final int COCURRENT_MODIFICATION_EXCEPTION_RETRY_NUMBER = 3; + + /** + * Default interval in retries (ms) + */ + private static final int COCURRENT_MODIFICATION_EXCEPTION_RETRY_TIME = 0; + + + + /** + * Property name of retryActive + */ + private static final String COCURRENT_MODIFICATION_EXCEPTION_RETRY_ACTIVE_PROPERTY = "msm.JavaGzip.retryActive"; + private static final String COCURRENT_MODIFICATION_EXCEPTION_RETRY_NUMBER_PROPERTY = "msm.JavaGzip.retryNumber"; + private static final String COCURRENT_MODIFICATION_EXCEPTION_RETRY_INTERVAL_PROPERTY = "msm.JavaGzip.retryInterval"; + + + private final boolean retryActive; + private final int retryNumber; + private final int retryInterval; + + public JavaGzipTranscoderFactory() { + this.retryActive = getSysPropValue(COCURRENT_MODIFICATION_EXCEPTION_RETRY_ACTIVE_PROPERTY, false); + this.retryNumber = getSysPropValue(COCURRENT_MODIFICATION_EXCEPTION_RETRY_NUMBER_PROPERTY, COCURRENT_MODIFICATION_EXCEPTION_RETRY_NUMBER); + this.retryInterval = getSysPropValue(COCURRENT_MODIFICATION_EXCEPTION_RETRY_INTERVAL_PROPERTY, COCURRENT_MODIFICATION_EXCEPTION_RETRY_TIME); + } + + + /** + * {@inheritDoc} + */ + @Override + public SessionAttributesTranscoder createTranscoder( final MemcachedSessionService.SessionManager manager ) { + return new JavaGzipTranscoder( manager.getContainerClassLoader(),retryActive,retryNumber,retryInterval ); + } + + /** + * If copyCollectionsForSerialization is set to true, + * an {@link UnsupportedOperationException} will be thrown, as java serialization + * cannot be changed and it does not copy collections for serialization. + * + * @param copyCollectionsForSerialization the copyCollectionsForSerialization value + */ + @Override + public void setCopyCollectionsForSerialization( final boolean copyCollectionsForSerialization ) { + if ( copyCollectionsForSerialization ) { + throw new UnsupportedOperationException( + "Java serialization cannot be changed - it does not copy collections for serialization." ); + } + } + + /** + * Throws an {@link UnsupportedOperationException}, as java serialization + * does not support custom xml format. + * + * @param customConverterClassNames a list of class names or null. + */ + @Override + public void setCustomConverterClassNames( final String[] customConverterClassNames ) { + if ( customConverterClassNames != null && customConverterClassNames.length > 0 ) { + throw new UnsupportedOperationException( "Java serialization does not support custom converter." 
); + } + } + + private int getSysPropValue( final String propName, final int defaultValue ) { + int value = defaultValue; + final String propValue = System.getProperty( propName ); + if ( propValue != null ) { + try { + value = Integer.parseInt( propValue ); + } catch( final NumberFormatException e ) { + LOG.warn( "Could not parse system property " + propName + " using default value " +defaultValue + " : " + e ); + } + } + return value; + } + + private boolean getSysPropValue( final String propName, final boolean defaultValue ) { + boolean value = defaultValue; + final String propValue = System.getProperty( propName ); + if ( propValue != null ) { + try { + value = Boolean.parseBoolean( propValue ); + } catch( final NumberFormatException e ) { + LOG.warn( "Could not parse system property " + propName + " using default value " +defaultValue + " : " + e ); + } + } + return value; + } + +} From 77b6de9b0d8672e5a71cac05d27a31e12826c3b0 Mon Sep 17 00:00:00 2001 From: ivolucas Date: Mon, 27 Mar 2017 12:08:07 +0100 Subject: [PATCH 2/5] Add a Transcoder that uses java serializer + LZ4 --- javalz4-serializer/README | 1 + javalz4-serializer/pom.xml | 74 +++++ .../serializer/javalz4/JavaLZ4Transcoder.java | 270 ++++++++++++++++++ .../javalz4/JavaLZ4TranscoderFactory.java | 111 +++++++ 4 files changed, 456 insertions(+) create mode 100644 javalz4-serializer/README create mode 100644 javalz4-serializer/pom.xml create mode 100644 javalz4-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalz4/JavaLZ4Transcoder.java create mode 100644 javalz4-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalz4/JavaLZ4TranscoderFactory.java diff --git a/javalz4-serializer/README b/javalz4-serializer/README new file mode 100644 index 00000000..fa006570 --- /dev/null +++ b/javalz4-serializer/README @@ -0,0 +1 @@ +A serialization strategy for the memcached-session-manager (http://code.google.com/p/memcached-session-manager ) that uses native Java Serialization and uses LZ4 to reduce the size of the session diff --git a/javalz4-serializer/pom.xml b/javalz4-serializer/pom.xml new file mode 100644 index 00000000..2d67f555 --- /dev/null +++ b/javalz4-serializer/pom.xml @@ -0,0 +1,74 @@ + + 4.0.0 + + de.javakaffee.msm + memcached-session-manager-project + 2.1.2-SNAPSHOT + + + de.javakaffee.msm + msm-javalz4-serializer + memcached-session-manager javalz4-serializer + 2.1.2-SNAPSHOT + jar + + + + de.javakaffee.msm + memcached-session-manager + ${project.version} + provided + + + org.apache.tomcat + juli + ${tomcat-version} + provided + + + org.apache.tomcat + coyote + ${tomcat-version} + provided + + + org.apache.tomcat + catalina + ${tomcat-version} + provided + + + + net.jpountz.lz4 + lz4 + 1.3.0 + + + + + + + + de.javakaffee.msm + memcached-session-manager + ${project.version} + test-jar + test + + + de.javakaffee.msm + memcached-session-manager-tc7 + ${project.version} + test + + + commons-lang + commons-lang + 2.6 + test + + + + + + diff --git a/javalz4-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalz4/JavaLZ4Transcoder.java b/javalz4-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalz4/JavaLZ4Transcoder.java new file mode 100644 index 00000000..d90e8eeb --- /dev/null +++ b/javalz4-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalz4/JavaLZ4Transcoder.java @@ -0,0 +1,270 @@ +package de.javakaffee.web.msm.serializer.javalz4; + +import de.javakaffee.web.msm.JavaSerializationTranscoder; +import de.javakaffee.web.msm.MemcachedBackupSession; +import 
de.javakaffee.web.msm.SessionAttributesTranscoder;
+import de.javakaffee.web.msm.TranscoderDeserializationException;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.NotSerializableException;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.ConcurrentModificationException;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.GZIPOutputStream;
+import net.jpountz.lz4.LZ4BlockInputStream;
+import net.jpountz.lz4.LZ4BlockOutputStream;
+import org.apache.catalina.session.StandardSession;
+import org.apache.catalina.util.CustomObjectInputStream;
+import org.apache.juli.logging.Log;
+import org.apache.juli.logging.LogFactory;
+
+/**
+ * A {@link SessionAttributesTranscoder} that serializes catalina
+ * {@link StandardSession}s using java serialization (and the serialization
+ * logic of {@link StandardSession} as found in
+ * {@link StandardSession#writeObjectData(ObjectOutputStream)} and
+ * {@link StandardSession#readObjectData(ObjectInputStream)}). It is a custom
+ * implementation of {@link JavaSerializationTranscoder} that uses
+ * {@link LZ4BlockInputStream} and {@link LZ4BlockOutputStream} to compress and
+ * decompress the session.
+ *
+ * @author ilucas
+ */
+public class JavaLZ4Transcoder implements SessionAttributesTranscoder {
+
+    private static final Log LOG = LogFactory.getLog(JavaLZ4Transcoder.class);
+
+    private static final String EMPTY_ARRAY[] = new String[0];
+
+    private final boolean retryActive;
+    private final int retryNumber;
+    private final int retryInterval;
+    /**
+     * The dummy attribute value serialized when a NotSerializableException is
+     * encountered in writeObject().
+     */
+    protected static final String NOT_SERIALIZED = "___NOT_SERIALIZABLE_EXCEPTION___";
+
+    private final ClassLoader classLoader;
+
+    /**
+     * Default constructor.
+     *
+     * Uses no special class loader and disables retries.
+     */
+    public JavaLZ4Transcoder() {
+        this(null, false, 1, 0);
+    }
+
+    /**
+     * Constructor.
+ * + * @param classLoader + * @param compresser + * @param retryActive + * @param retryNumber + * @param retryInterval + */ + public JavaLZ4Transcoder(ClassLoader classLoader, boolean retryActive, int retryNumber, int retryInterval) { + this.retryActive = retryActive; + this.retryNumber = retryNumber; + this.retryInterval = retryInterval; + this.classLoader = classLoader; + + } + + /** + * {@inheritDoc} + */ + @Override + public byte[] serializeAttributes(final MemcachedBackupSession session, final ConcurrentMap attributes) { + if (attributes == null) { + throw new NullPointerException("Can't serialize null"); + } + if (retryActive) { + int i = 0; + ConcurrentModificationException concurrentModificationException = null; + do { + try { + return writeAttributes(session, attributes); + } catch (ConcurrentModificationException ex) { + concurrentModificationException = ex; + LOG.warn("ConcurrentModificationException on write attributes of session " + session.getIdInternal() + " retry " + i); + if (retryInterval > 0) { + try { + Thread.sleep(retryInterval); + } catch (InterruptedException interruptedException) { + LOG.debug("Interrupted on wating for try to writeAttributes: " + interruptedException); + } + } + } + } while (i++ < retryNumber); + throw concurrentModificationException; + } else { + + return writeAttributes(session, attributes); + + } + } + + private byte[] writeAttributes(final MemcachedBackupSession session, final ConcurrentMap attributes) throws IllegalArgumentException { + ByteArrayOutputStream bos = null; + OutputStream gzs = null; + ObjectOutputStream oos = null; + try { + bos = new ByteArrayOutputStream(); + gzs = new LZ4BlockOutputStream(bos); + oos = new ObjectOutputStream(gzs); + writeAttributes(session, attributes, oos); + closeSilently(oos); + closeSilently(gzs); + return bos.toByteArray(); + } catch (final IOException e) { + throw new IllegalArgumentException("Non-serializable object", e); + } finally { + closeSilently(oos); + closeSilently(gzs); + closeSilently(bos); + } + } + + private void writeAttributes(final MemcachedBackupSession session, final Map attributes, + final ObjectOutputStream oos) throws IOException { + + // Accumulate the names of serializable and non-serializable attributes + final String keys[] = attributes.keySet().toArray(EMPTY_ARRAY); + final List saveNames = new ArrayList(); + final List saveValues = new ArrayList(); + for (int i = 0; i < keys.length; i++) { + final Object value = attributes.get(keys[i]); + if (value == null) { + continue; + } else if (value instanceof Serializable) { + saveNames.add(keys[i]); + saveValues.add(value); + } else { + if (LOG.isDebugEnabled()) { + LOG.debug("Ignoring attribute '" + keys[i] + "' as it does not implement Serializable"); + } + } + } + + // Serialize the attribute count and the Serializable attributes + final int n = saveNames.size(); + oos.writeObject(Integer.valueOf(n)); + for (int i = 0; i < n; i++) { + oos.writeObject(saveNames.get(i)); + if (LOG.isDebugEnabled()) { + LOG.debug(" start storing attribute '" + saveNames.get(i) + "'"); + } + try { + oos.writeObject(saveValues.get(i)); + if (LOG.isDebugEnabled()) { + LOG.debug(" stored attribute '" + saveNames.get(i) + "' with value '" + saveValues.get(i) + "'"); + } + } catch (final NotSerializableException e) { + LOG.warn("Attribute " + saveNames.get(i) + " of session " + session.getIdInternal() + " is not serializable", e); + oos.writeObject(NOT_SERIALIZED); + if (LOG.isDebugEnabled()) { + LOG.debug(" storing attribute '" + saveNames.get(i) + "' 
with value NOT_SERIALIZED"); + } + } catch (final ConcurrentModificationException concurrentModificationException) { + // throw to catch and retry + throw concurrentModificationException; + } catch (final Exception e) { + LOG.warn("Attribute " + saveNames.get(i) + " of session " + session.getIdInternal() + " was not serialized correctly", e); + throw new IOException("Attribute " + saveNames.get(i) + " of session " + session.getIdInternal() + " was not serialized correctly, stream invalidated", e); + } + + } + + } + + /** + * Get the object represented by the given serialized bytes. + * + * @param in the bytes to deserialize + * @return the resulting object + */ + @Override + public ConcurrentMap deserializeAttributes(final byte[] in) { + ByteArrayInputStream bis = null; + InputStream gzs = null; + ObjectInputStream ois = null; + try { + bis = new ByteArrayInputStream(in); + gzs = new LZ4BlockInputStream(bis); + ois = createObjectInputStream(gzs); + + final ConcurrentMap attributes = new ConcurrentHashMap(); + final int n = ((Integer) ois.readObject()).intValue(); + for (int i = 0; i < n; i++) { + final String name = (String) ois.readObject(); + if (LOG.isDebugEnabled()) { + LOG.debug(" start reading attribute '" + name + "'"); + } + final Object value = ois.readObject(); + if ((value instanceof String) && (value.equals(NOT_SERIALIZED))) { + continue; + } + if (LOG.isDebugEnabled()) { + LOG.debug(" loading attribute '" + name + "' with value '" + value + "' to map"); + } + attributes.put(name, value); + } + + return attributes; + } catch (final ClassNotFoundException e) { + LOG.warn("Caught CNFE decoding " + in.length + " bytes of data", e); + throw new TranscoderDeserializationException("Caught CNFE decoding data", e); + } catch (final IOException e) { + LOG.warn("Caught IOException decoding " + in.length + " bytes of data", e); + throw new TranscoderDeserializationException("Caught IOException decoding data", e); + } finally { + closeSilently(bis); + closeSilently(gzs); + closeSilently(ois); + } + } + + private ObjectInputStream createObjectInputStream(final InputStream bis) throws IOException { + final ObjectInputStream ois; + if (classLoader != null) { + ois = new CustomObjectInputStream(bis, classLoader); + } else { + ois = new ObjectInputStream(bis); + } + return ois; + } + + private void closeSilently(final OutputStream os) { + if (os != null) { + try { + os.close(); + } catch (final IOException f) { + LOG.debug("Error on closeSilently OutputStream " + os, f); + } + } + } + + private void closeSilently(final InputStream is) { + if (is != null) { + try { + is.close(); + } catch (final IOException f) { + LOG.debug("Error on closeSilently InputStream:" + is, f); + } + } + } + +} diff --git a/javalz4-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalz4/JavaLZ4TranscoderFactory.java b/javalz4-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalz4/JavaLZ4TranscoderFactory.java new file mode 100644 index 00000000..e6d4c7b2 --- /dev/null +++ b/javalz4-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalz4/JavaLZ4TranscoderFactory.java @@ -0,0 +1,111 @@ +package de.javakaffee.web.msm.serializer.javalz4; + +import de.javakaffee.web.msm.MemcachedSessionService; +import de.javakaffee.web.msm.SessionAttributesTranscoder; +import de.javakaffee.web.msm.TranscoderFactory; +import org.apache.juli.logging.Log; +import org.apache.juli.logging.LogFactory; + +/** +* A {@link TranscoderFactory} that creates {@link JavaLZ4Transcoder} instances. 
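+ * <p>
+ * Retry handling for {@code ConcurrentModificationException}s thrown during
+ * serialization can be configured via the system properties
+ * {@code msm.JavaLZ4.retryActive} (default {@code false}),
+ * {@code msm.JavaLZ4.retryNumber} (default 3) and
+ * {@code msm.JavaLZ4.retryInterval} (milliseconds between retries, default 0).
+ * </p>
+ *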
+ * @author ilucas + */ +public class JavaLZ4TranscoderFactory implements TranscoderFactory { + + private static final Log LOG = LogFactory.getLog(JavaLZ4TranscoderFactory.class ); + + /** + * Default number of tries to serialize attributes if a + * ConcurrentModificationException is thrown + */ + private static final int COCURRENT_MODIFICATION_EXCEPTION_RETRY_NUMBER = 3; + + /** + * Default interval in retries (ms) + */ + private static final int COCURRENT_MODIFICATION_EXCEPTION_RETRY_TIME = 0; + + + + /** + * Property name of retryActive + */ + private static final String COCURRENT_MODIFICATION_EXCEPTION_RETRY_ACTIVE_PROPERTY = "msm.JavaLZ4.retryActive"; + private static final String COCURRENT_MODIFICATION_EXCEPTION_RETRY_NUMBER_PROPERTY = "msm.JavaLZ4.retryNumber"; + private static final String COCURRENT_MODIFICATION_EXCEPTION_RETRY_INTERVAL_PROPERTY = "msm.JavaLZ4.retryInterval"; + + + private final boolean retryActive; + private final int retryNumber; + private final int retryInterval; + + public JavaLZ4TranscoderFactory() { + this.retryActive = getSysPropValue(COCURRENT_MODIFICATION_EXCEPTION_RETRY_ACTIVE_PROPERTY, false); + this.retryNumber = getSysPropValue(COCURRENT_MODIFICATION_EXCEPTION_RETRY_NUMBER_PROPERTY, COCURRENT_MODIFICATION_EXCEPTION_RETRY_NUMBER); + this.retryInterval = getSysPropValue(COCURRENT_MODIFICATION_EXCEPTION_RETRY_INTERVAL_PROPERTY, COCURRENT_MODIFICATION_EXCEPTION_RETRY_TIME); + } + + + /** + * {@inheritDoc} + */ + @Override + public SessionAttributesTranscoder createTranscoder( final MemcachedSessionService.SessionManager manager ) { + return new JavaLZ4Transcoder( manager.getContainerClassLoader(),retryActive,retryNumber,retryInterval ); + } + + /** + * If copyCollectionsForSerialization is set to true, + * an {@link UnsupportedOperationException} will be thrown, as java serialization + * cannot be changed and it does not copy collections for serialization. + * + * @param copyCollectionsForSerialization the copyCollectionsForSerialization value + */ + @Override + public void setCopyCollectionsForSerialization( final boolean copyCollectionsForSerialization ) { + if ( copyCollectionsForSerialization ) { + throw new UnsupportedOperationException( + "Java serialization cannot be changed - it does not copy collections for serialization." ); + } + } + + /** + * Throws an {@link UnsupportedOperationException}, as java serialization + * does not support custom xml format. + * + * @param customConverterClassNames a list of class names or null. + */ + @Override + public void setCustomConverterClassNames( final String[] customConverterClassNames ) { + if ( customConverterClassNames != null && customConverterClassNames.length > 0 ) { + throw new UnsupportedOperationException( "Java serialization does not support custom converter." 
); + } + } + + private int getSysPropValue( final String propName, final int defaultValue ) { + int value = defaultValue; + final String propValue = System.getProperty( propName ); + if ( propValue != null ) { + try { + value = Integer.parseInt( propValue ); + } catch( final NumberFormatException e ) { + LOG.warn( "Could not parse system property " + propName + " using default value " +defaultValue + " : " + e ); + } + } + return value; + } + + private boolean getSysPropValue( final String propName, final boolean defaultValue ) { + boolean value = defaultValue; + final String propValue = System.getProperty( propName ); + if ( propValue != null ) { + try { + value = Boolean.parseBoolean( propValue ); + } catch( final NumberFormatException e ) { + LOG.warn( "Could not parse system property " + propName + " using default value " +defaultValue + " : " + e ); + } + } + return value; + } + +} From 8cae3f883b6fbcca5d0150ea924453a6d4bc33ee Mon Sep 17 00:00:00 2001 From: ivolucas Date: Mon, 27 Mar 2017 12:10:45 +0100 Subject: [PATCH 3/5] Add benchmark that uses a memcache to view the effects of session size --- pom.xml | 4 +- serializer-benchmark/pom.xml | 10 + .../msm/serializer/BenchmarkWithMemcache.java | 356 ++++++++++++++++++ 3 files changed, 369 insertions(+), 1 deletion(-) create mode 100644 serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/BenchmarkWithMemcache.java diff --git a/pom.xml b/pom.xml index 26c58fee..62f412b5 100644 --- a/pom.xml +++ b/pom.xml @@ -57,7 +57,9 @@ tomcat6 tomcat7 tomcat8 - kryo-serializer + kryo-serializer + javagzip-serializer + javalz4-serializer javolution-serializer xstream-serializer flexjson-serializer diff --git a/serializer-benchmark/pom.xml b/serializer-benchmark/pom.xml index ed180159..8e02372d 100644 --- a/serializer-benchmark/pom.xml +++ b/serializer-benchmark/pom.xml @@ -57,6 +57,16 @@ msm-xstream-serializer ${project.version} + + de.javakaffee.msm + msm-javalz4-serializer + ${project.version} + + + de.javakaffee.msm + msm-javagzip-serializer + ${project.version} + commons-lang commons-lang diff --git a/serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/BenchmarkWithMemcache.java b/serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/BenchmarkWithMemcache.java new file mode 100644 index 00000000..844fe2d5 --- /dev/null +++ b/serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/BenchmarkWithMemcache.java @@ -0,0 +1,356 @@ +/* + * Copyright 2010 Martin Grotzke + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +package de.javakaffee.web.msm.serializer; + +import java.util.Calendar; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.catalina.core.StandardContext; +import org.apache.catalina.loader.WebappLoader; + +import de.javakaffee.web.msm.JavaSerializationTranscoder; +import de.javakaffee.web.msm.MemcachedBackupSession; +import de.javakaffee.web.msm.MemcachedBackupSessionManager; +import de.javakaffee.web.msm.SessionAttributesTranscoder; +import de.javakaffee.web.msm.TranscoderService; +import de.javakaffee.web.msm.serializer.TestClasses.Address; +import de.javakaffee.web.msm.serializer.TestClasses.Component; +import de.javakaffee.web.msm.serializer.TestClasses.Person; +import de.javakaffee.web.msm.serializer.TestClasses.Person.Gender; +import de.javakaffee.web.msm.serializer.javalgzip.JavaGzipTranscoder; +import de.javakaffee.web.msm.serializer.javalz4.JavaLZ4Transcoder; +import de.javakaffee.web.msm.serializer.kryo.KryoTranscoder; +import java.io.IOException; +import java.net.InetSocketAddress; +import java.util.concurrent.TimeUnit; +import net.spy.memcached.MemcachedClient; + +/** + * A simple benchmark for existing serialization strategies. + * + * @author Martin Grotzke + */ +public class BenchmarkWithMemcache { + + /* + * 50000: + * -- JavaSerializationTranscoder -- +Serializing 1000 sessions took 156863 msec. +serialized size is 59016 bytes. +-- JavolutionTranscoder -- +Serializing 1000 sessions took 251870 msec. +serialized size is 138374 bytes. +-- KryoTranscoder -- +Serializing 1000 sessions took 154816 msec. +serialized size is 70122 bytes. + + */ + public static void main(final String[] args) throws InterruptedException, IOException { + + //Thread.sleep( 1000 ); + final MemcachedBackupSessionManager manager = createManager(); + + // some warmup + // final int warmupCycles = 100000; + final int warmupCycles = 100; + warmup(manager, new KryoTranscoder(), warmupCycles, 100, 3); + warmup(manager, new JavaSerializationTranscoder(), warmupCycles, 100, 3); + warmup(manager, new JavaLZ4Transcoder(null, true, 1, 0), warmupCycles, 100, 3); + warmup(manager, new JavaGzipTranscoder(null, true, 1, 0), warmupCycles, 100, 3); + + recover(); + + benchmark(manager, 10, 500, 4 /* 4^4 = 256 */); + benchmark(manager, 10, 100, 3 /* 3^3 = 27 */); + benchmark(manager, 10, 10, 2 /* 2^2 = 4 */); + + // Thread.sleep( Integer.MAX_VALUE ); + } + + private static void benchmark(final MemcachedBackupSessionManager manager, final int rounds, final int countPersons, + final int nodesPerEdge) throws InterruptedException, IOException { + + FullStats benchmarkKryo = benchmark(manager, new KryoTranscoder(), rounds, countPersons, nodesPerEdge); + + recover(); + + FullStats benchmarkJavaLZ4 = benchmark(manager, new JavaLZ4Transcoder(null, true, 1, 0), rounds, countPersons, nodesPerEdge); + recover(); + + FullStats benchmarkJavaGzip = benchmark(manager, new JavaGzipTranscoder(null, true, 1, 0), rounds, countPersons, nodesPerEdge); + + recover(); + + FullStats benchmarkJava = benchmark(manager, new JavaSerializationTranscoder(), rounds, countPersons, nodesPerEdge); + + System.out.println("Serialization,Size,Total-Min,Total-Avg,Total-Max,Ser-Min,Ser-Avg,Ser-Max,Deser-Min,Deser-Avg,Deser-Max,Write-Min,Write-Avg,Write-Max,Read-Min,Read-Avg,Read-Max"); + System.out.println(toCSV("Kryo", benchmarkKryo)); + System.out.println(toCSV("Java", benchmarkJava)); + System.out.println(toCSV("JavaLZ4", benchmarkJavaLZ4)); + System.out.println(toCSV("JavaGzip", benchmarkJavaGzip)); + + } + + private static 
String toCSV(final String name, FullStats fullstats) { + return name + "," + fullstats.size + "," + + minAvgMax(fullstats.totalTimeStats) + "," + + minAvgMax(fullstats.serializationStats) + "," + + minAvgMax(fullstats.deserializationStats) + "," + + minAvgMax(fullstats.writeMemcacheStats) + "," + + minAvgMax(fullstats.readMemcacheStats); + } + + private static String minAvgMax(final NanoStats stats) { + return stats.min + "," + stats.getAvg2() + "," + stats.max; + } + + private static void recover() throws InterruptedException { + Thread.sleep(100); + System.gc(); + Thread.sleep(1000); + } + + private static FullStats benchmark(final MemcachedBackupSessionManager manager, final SessionAttributesTranscoder transcoder, + final int rounds, final int countPersons, final int nodesPerEdge) throws InterruptedException, IOException { + + System.out.println("Running benchmark for " + transcoder.getClass().getSimpleName() + "..." + + " (rounds: " + rounds + ", persons: " + countPersons + ", nodes: " + ((int) Math.pow(nodesPerEdge, nodesPerEdge) + nodesPerEdge + 1) + ")"); + final NanoStats serializationStats = new NanoStats(); + final NanoStats deserializationStats = new NanoStats(); + final NanoStats writeMemcacheStats = new NanoStats(); + final NanoStats readMemcacheStats = new NanoStats(); + final NanoStats totalTimeStats = new NanoStats(); + + final TranscoderService transcoderService = new TranscoderService(transcoder); + + final MemcachedBackupSession session = createSession(manager, "123456789abcdefghijk987654321", countPersons, nodesPerEdge); + final byte[] data = transcoderService.serialize(session); + final int size = data.length; + MemcachedClient memcachedClient = new MemcachedClient(new InetSocketAddress("127.0.0.1", 11211)); + for (int r = 0; r < rounds; r++) { + + for (int i = 0; i < 500; i++) { + //TODO Add a timming or memcache server to take in account operations that are afected by the size of the session + long start = System.nanoTime(); + byte[] sessionData = transcoderService.serialize(session); + serializationStats.registerSince(start); + + long startMemcachedSet = System.nanoTime(); + memcachedClient.set("key" + i, 600, sessionData); + writeMemcacheStats.registerSince(startMemcachedSet); + + long startMemcachedGet = System.nanoTime(); + byte[] sessionDataRetrived = (byte[]) memcachedClient.get("key" + i); + readMemcacheStats.registerSince(startMemcachedGet); + + long startDeserialize = System.nanoTime(); + transcoderService.deserialize(sessionDataRetrived, manager); + deserializationStats.registerSince(startDeserialize); + totalTimeStats.registerSince(start); + } + } + memcachedClient.shutdown(10, TimeUnit.SECONDS); + return new FullStats(size, serializationStats, deserializationStats, writeMemcacheStats, readMemcacheStats, totalTimeStats); + } + + private static void warmup(final MemcachedBackupSessionManager manager, final SessionAttributesTranscoder transcoder, + final int loops, final int countPersons, final int nodesPerEdge) + throws InterruptedException { + + final TranscoderService transcoderService = new TranscoderService(transcoder); + final MemcachedBackupSession session = createSession(manager, "123456789abcdefghijk987654321", countPersons, nodesPerEdge); + + System.out.print("Performing warmup for serialization using " + transcoder.getClass().getSimpleName() + "..."); + final long serWarmupStart = System.currentTimeMillis(); + for (int i = 0; i < loops; i++) { + transcoderService.serialize(session); + } + System.out.println(" (" + (System.currentTimeMillis() - 
serWarmupStart) + " ms)"); + + System.out.print("Performing warmup for deserialization..."); + final byte[] data = transcoderService.serialize(session); + final long deserWarmupStart = System.currentTimeMillis(); + for (int i = 0; i < loops; i++) { + transcoderService.deserialize(data, manager); + } + System.out.println(" (" + (System.currentTimeMillis() - deserWarmupStart) + " ms)"); + + } + + private static MemcachedBackupSession createSession(final MemcachedBackupSessionManager manager, final String id, + final int countPersons, final int countNodesPerEdge) { + final MemcachedBackupSession session = manager.createEmptySession(); + session.setId(id); + session.setValid(true); + + session.setAttribute("stringbuffer", new StringBuffer("")); + session.setAttribute("stringbuilder", new StringBuilder("")); + + session.setAttribute("persons", createPersons(countPersons)); + session.setAttribute("mycontainer", new TestClasses.MyContainer()); + + session.setAttribute("component", createComponents(countNodesPerEdge)); + + return session; + } + + private static Component createComponents(final int countNodesPerEdge) { + final Component root = new Component("root"); + for (int i = 0; i < countNodesPerEdge; i++) { + final Component node = new Component("child" + i); + addChildren(node, countNodesPerEdge); + root.addChild(node); + } + return root; + } + + private static void addChildren(final Component node, final int count) { + for (int i = 0; i < count; i++) { + node.addChild(new Component(node.getName() + "-" + i)); + } + } + + private static Person[] createPersons(final int countPersons) { + final Person[] persons = new Person[countPersons]; + for (int i = 0; i < countPersons; i++) { + final Calendar dateOfBirth = Calendar.getInstance(); + dateOfBirth.set(Calendar.YEAR, dateOfBirth.get(Calendar.YEAR) - 42); + final Person person = TestClasses.createPerson("Firstname" + i + " Lastname" + i, + i % 2 == 0 ? Gender.FEMALE : Gender.MALE, + dateOfBirth, + "email" + i + "-1@example.org", "email" + i + "-2@example.org", "email" + i + "-3@example.org"); + person.addAddress(new Address("route66", "123456", "sincity", "sincountry")); + + if (i > 0) { + person.addFriend(persons[i - 1]); + } + + persons[i] = person; + } + return persons; + } + + private static MemcachedBackupSessionManager createManager() { + final MemcachedBackupSessionManager manager = new MemcachedBackupSessionManager(); + + final StandardContext container = new StandardContext(); + manager.setContainer(container); + + final WebappLoader webappLoader = new WebappLoader() { + /** + * {@inheritDoc} + */ + @Override + public ClassLoader getClassLoader() { + return Thread.currentThread().getContextClassLoader(); + } + }; + manager.getContainer().setLoader(webappLoader); + + return manager; + } + + static class NanoStats { + + long min; + long max; + long total = 0; + int samples; + + private boolean _first = true; + private final AtomicInteger _count = new AtomicInteger(); + + /** + * A utility method that calculates the difference of the time between + * the given startInNamo and {@link System#nanoTime()} and + * registers the difference via {@link #register(long)}. + * + * @param startInMillis the time in millis that shall be subtracted from + * {@link System#currentTimeMillis()}. + */ + public void registerSince(final long startInMillis) { + register(System.nanoTime() - startInMillis); + } + + /** + * Register the given value. + * + * @param value the value to register. 
+ */ + public void register(final long value) { + if (value < min || _first) { + min = value; + } + if (value > max || _first) { + max = value; + } + total += value; + samples++; + _first = false; + } + + public double getAvg() { + return (total) / (samples); + } + + /** + * Avg discarding the best and the worst time + * @return avg discarding the best and the worst time + */ + public double getAvg2() { + return (total - min - max) / (samples - 2); + } + + /** + * Returns a string array with labels and values of count, min, avg and + * max. + * + * @return a String array. + */ + public String[] getInfo() { + return new String[]{ + "Count = " + _count.get(), + "Min = " + min, + "Avg = " + getAvg(), + "Avg2 = " + getAvg2(), + "Max = " + max + }; + } + + } + + static class FullStats { + + final NanoStats serializationStats; + final NanoStats deserializationStats; + final NanoStats writeMemcacheStats; + final NanoStats readMemcacheStats; + final NanoStats totalTimeStats; + final int size; + + public FullStats(int size, NanoStats serializationStats, NanoStats deserializationStats, NanoStats writeMemcacheStats, NanoStats readMemcacheStats, NanoStats totalTimeStats) { + this.serializationStats = serializationStats; + this.deserializationStats = deserializationStats; + this.writeMemcacheStats = writeMemcacheStats; + this.readMemcacheStats = readMemcacheStats; + this.totalTimeStats = totalTimeStats; + this.size = size; + } + + } + +} From e34a5b37792820178d83de6da77e6089a10f94c1 Mon Sep 17 00:00:00 2001 From: ivolucas Date: Mon, 27 Mar 2017 19:40:42 +0100 Subject: [PATCH 4/5] Add JavaLZ4Transcoder to Benchmark --- .../web/msm/serializer/Benchmark.java | 10 ++++++++++ .../msm/serializer/BenchmarkWithMemcache.java | 19 ++++--------------- 2 files changed, 14 insertions(+), 15 deletions(-) diff --git a/serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/Benchmark.java b/serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/Benchmark.java index 23cd412c..ddab1c81 100644 --- a/serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/Benchmark.java +++ b/serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/Benchmark.java @@ -31,6 +31,7 @@ import de.javakaffee.web.msm.serializer.TestClasses.Component; import de.javakaffee.web.msm.serializer.TestClasses.Person; import de.javakaffee.web.msm.serializer.TestClasses.Person.Gender; +import de.javakaffee.web.msm.serializer.javalz4.JavaLZ4Transcoder; import de.javakaffee.web.msm.serializer.javolution.JavolutionTranscoder; import de.javakaffee.web.msm.serializer.kryo.KryoTranscoder; @@ -67,6 +68,7 @@ public static void main( final String[] args ) throws InterruptedException { warmup( manager, new KryoTranscoder(), warmupCycles, 100, 3 ); warmup( manager, new JavaSerializationTranscoder(), warmupCycles, 100, 3 ); warmup( manager, new JavolutionTranscoder( Thread.currentThread().getContextClassLoader(), false ), warmupCycles, 100, 3 ); + warmup( manager, new JavaLZ4Transcoder(null, false, 1, 0), warmupCycles, 100, 3 ); recover(); benchmark( manager, 10, 500, 4 /* 4^4 = 256 */ ); @@ -96,10 +98,18 @@ private static void benchmark( final MemcachedBackupSessionManager manager, fina recover(); + final Stats javalz4SerStats = new Stats(); + final Stats javalz4DeSerStats = new Stats(); + benchmark( manager, new JavaLZ4Transcoder(null, false, 1, 0), javalz4SerStats, + javalz4DeSerStats, rounds, countPersons, nodesPerEdge ); + + recover(); + System.out.println( 
"Serialization,Size,Ser-Min,Ser-Avg,Ser-Max,Deser-Min,Deser-Avg,Deser-Max"); System.out.println( toCSV( "Java", javaSerStats, javaDeSerStats ) ); System.out.println( toCSV( "Javolution", javolutionSerStats, javolutionDeSerStats ) ); System.out.println( toCSV( "Kryo", kryoSerStats, kryoDeSerStats ) ); + System.out.println( toCSV( "JavaLZ4", javalz4SerStats, javalz4DeSerStats ) ); } private static String toCSV( final String name, final Stats serStats, final Stats deSerStats ) { diff --git a/serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/BenchmarkWithMemcache.java b/serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/BenchmarkWithMemcache.java index 844fe2d5..6f7875d4 100644 --- a/serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/BenchmarkWithMemcache.java +++ b/serializer-benchmark/src/main/java/de/javakaffee/web/msm/serializer/BenchmarkWithMemcache.java @@ -40,25 +40,15 @@ import net.spy.memcached.MemcachedClient; /** - * A simple benchmark for existing serialization strategies. + * A simple benchmark for existing serialization strategies with a memcache server + * to take in account the cost of the session size when writing and reeding. * * @author Martin Grotzke + * @author ivolucas */ public class BenchmarkWithMemcache { - /* - * 50000: - * -- JavaSerializationTranscoder -- -Serializing 1000 sessions took 156863 msec. -serialized size is 59016 bytes. --- JavolutionTranscoder -- -Serializing 1000 sessions took 251870 msec. -serialized size is 138374 bytes. --- KryoTranscoder -- -Serializing 1000 sessions took 154816 msec. -serialized size is 70122 bytes. - - */ + public static void main(final String[] args) throws InterruptedException, IOException { //Thread.sleep( 1000 ); @@ -144,7 +134,6 @@ private static FullStats benchmark(final MemcachedBackupSessionManager manager, for (int r = 0; r < rounds; r++) { for (int i = 0; i < 500; i++) { - //TODO Add a timming or memcache server to take in account operations that are afected by the size of the session long start = System.nanoTime(); byte[] sessionData = transcoderService.serialize(session); serializationStats.registerSince(start); From d949ac170160fb7cf48ba37de357a9155f33b578 Mon Sep 17 00:00:00 2001 From: ivolucas Date: Mon, 27 Mar 2017 19:48:50 +0100 Subject: [PATCH 5/5] Minor correction --- .../web/msm/serializer/javalz4/JavaLZ4Transcoder.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/javalz4-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalz4/JavaLZ4Transcoder.java b/javalz4-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalz4/JavaLZ4Transcoder.java index d90e8eeb..16600848 100644 --- a/javalz4-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalz4/JavaLZ4Transcoder.java +++ b/javalz4-serializer/src/main/java/de/javakaffee/web/msm/serializer/javalz4/JavaLZ4Transcoder.java @@ -70,7 +70,6 @@ public JavaLZ4Transcoder() { * Constructor. * * @param classLoader - * @param compresser * @param retryActive * @param retryNumber * @param retryInterval @@ -119,7 +118,7 @@ public byte[] serializeAttributes(final MemcachedBackupSession session, final Co private byte[] writeAttributes(final MemcachedBackupSession session, final ConcurrentMap attributes) throws IllegalArgumentException { ByteArrayOutputStream bos = null; - OutputStream gzs = null; + LZ4BlockOutputStream gzs = null; ObjectOutputStream oos = null; try { bos = new ByteArrayOutputStream();