Import users into Liferay

If you’re using Liferay without LDAP, you will probably face the same problem I did – how to import a large number of users without manually adding them to the system.

So, loving automation, I’ve decided to create a simple Portlet that does just that.

  1. Create a new Dynamic Web App in Eclipse.
  2. Configure all the necessary deployment descriptors (liferay-portlet.xml, portlet.xml, web.xml, etc.).
  3. Create a new User class:
    package com.tona.liferay.web;

    public class User {
        private String firstName;
        private String lastName;
        private String email;
        private String phoneNo;
        private String screenName;
        private String password;

        public String getEmail() {
            return email;
        }

        public void setEmail(String email) {
            this.email = email;
        }

        public String getPhoneNo() {
            return phoneNo;
        }

        public void setPhoneNo(String phoneNo) {
            this.phoneNo = phoneNo;
        }

        public String getFirstName() {
            return firstName;
        }

        public void setFirstName(String firstName) {
            this.firstName = firstName;
        }

        public String getLastName() {
            return lastName;
        }

        public void setLastName(String lastName) {
            this.lastName = lastName;
        }

        public String getScreenName() {
            return screenName;
        }

        public void setScreenName(String screenName) {
            this.screenName = screenName;
        }

        public String getPassword() {
            return password;
        }

        public void setPassword(String password) {
            this.password = password;
        }

        // Parses a single CSV line. Column 0 is ignored; columns 1-4 are
        // first name, last name, e-mail and phone number.
        public User(String line) {
            String[] tokens = line.split(",");
            setFirstName(tokens[1]);
            setLastName(tokens[2]);
            setEmail(tokens[3]);
            setPhoneNo(tokens[4]);
            // Screen name = first name + first three letters of the last name, lower-cased
            String screenName = getFirstName() + getLastName().substring(0, 3);
            setScreenName(screenName.toLowerCase());
            setPassword(getScreenName() + "123");
        }

        public User() {

        }

    }
    
    
  4. Create the portlet itself:
    package com.tona.liferay.web;

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Locale;

    import javax.portlet.ActionRequest;
    import javax.portlet.ActionResponse;
    import javax.portlet.PortletException;

    import com.liferay.portal.model.Company;
    import com.liferay.portal.service.CompanyLocalServiceUtil;
    import com.liferay.portal.service.ServiceContext;
    import com.liferay.portal.service.UserLocalServiceUtil;
    import com.liferay.util.bridges.mvc.MVCPortlet;

    public class ImportUsersPortlet extends MVCPortlet {
        public void importUsers(ActionRequest actionRequest,
                ActionResponse actionResponse) throws IOException, PortletException {

            String fileName = actionRequest.getParameter("fileName");

            BufferedReader fr = new BufferedReader(new FileReader(fileName));

            List<User> users = new ArrayList<User>();

            String line;

            while ((line = fr.readLine()) != null) {
                users.add(new User(line));
            }

            fr.close();

            // We now have the user list
            ServiceContext context = new ServiceContext();
            long companyId = 0;

            try {
                Company company = CompanyLocalServiceUtil.getCompanies().get(0);
                companyId = company.getCompanyId();
                for (User user : users) {
                    try {
                        UserLocalServiceUtil.addUser(0, companyId, false,
                            user.getPassword(), user.getPassword(), false, user.getScreenName(),
                            user.getEmail(), 0, "", Locale.getDefault(),
                            user.getFirstName(), "", user.getLastName(), 0, 0,
                            true, 1, 1, 1970, null, null, null, null, null, false,
                            context);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }

        }
    }
    
    
  5. Create a WAR file and deploy it in Liferay
  6. Note that the portlet does not upload the CSV file – it expects the file to already exist on the Liferay server itself.
  7. You can of course change the screen-name and password generation algorithm by changing the User constructor. A sample CSV line and a minimal view JSP sketch follow below.
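
Based on the User constructor above (column 0 is skipped), each line in the CSV file is expected to look something like this:

1,John,Smith,john.smith@example.com,555-1234

And here is a minimal sketch of a view JSP that could trigger the action. This part is an assumption on my side: the form field name (fileName) matches the parameter read in importUsers, and the action name relies on the standard MVCPortlet convention of dispatching to a public method with the same name.

<%@ taglib uri="http://java.sun.com/portlet_2_0" prefix="portlet" %>

<portlet:actionURL name="importUsers" var="importUsersURL" />

<form action="<%= importUsersURL %>" method="post">
	Path to the CSV file on the server: <input type="text" name="fileName" />
	<input type="submit" value="Import" />
</form>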

Updating Pentaho PRPT files to add a PreProcessor

In my previous post (see here) I mentioned that I couldn’t add a pre-processor to a Pentaho report using the report designer. So, I’ve written a short Java program that does just that.
Note that I use a neat open source library called Zip4J (you can get it here).

package com.tona.rprt;

import java.io.File;
import java.io.FileWriter;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;

import net.lingala.zip4j.core.ZipFile;
import net.lingala.zip4j.model.ZipParameters;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;

public class Main {
	private static final String CONFIG_FILE_NAME = "layout.xml";
	
	public static void main(String[] args) throws Exception {
		ZipFile reportFile = new ZipFile("");

		File tempDirectory = createTempDirectory();
		String path = tempDirectory.getAbsolutePath();
		reportFile.extractFile(CONFIG_FILE_NAME, path);

		System.out.println("Extraced file to " + path);
		File updatedFile = new File(path + File.separator + CONFIG_FILE_NAME);

		// Update the file
		DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
		DocumentBuilder db = dbf.newDocumentBuilder();
		Document doc = db.parse(updatedFile);
		
		System.out.println("Parsed document");
		
		Element layoutNode = doc.getDocumentElement();
		Element preProcessorElement = doc.createElement("preprocessor");
		preProcessorElement.setAttribute("class", "com.tona.report.infra.TonaWizardProcessor");
		Node firstLayoutChild = layoutNode.getFirstChild(); 
		layoutNode.insertBefore(preProcessorElement, firstLayoutChild);
		
		System.out.println("Added child");

		FileWriter output = new FileWriter(updatedFile);
		javax.xml.transform.stream.StreamResult result = new javax.xml.transform.stream.StreamResult(output);

		TransformerFactory tf = TransformerFactory.newInstance();
		Transformer t = tf.newTransformer();
		t.transform(new DOMSource(doc), result);
		output.close();

		System.out.println("Updated XML file");
		
		ZipParameters parameters = new ZipParameters();
		reportFile.removeFile(CONFIG_FILE_NAME);
		reportFile.addFile(updatedFile, parameters);

		System.out.println("Updated ZIP file");

		// Clean up: delete the extracted file first, otherwise the non-empty directory cannot be removed
		updatedFile.delete();
		tempDirectory.delete();

		System.out.println("Removed temporary directory");
	}
	
	private static File createTempDirectory() throws Exception {
		File temp = File.createTempFile("temp", Long.toString(System.nanoTime()));

		if (!temp.delete()) {
			throw new Exception("Could not delete temp file: " + temp.getAbsolutePath());
		}

		if (!temp.mkdir()) {
			throw new Exception("Could not create temp directory: " + temp.getAbsolutePath());
		}

		return temp;
	}
}
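
For reference, the program inserts a single element as the first child of layout.xml's document element; with the class name used above, the inserted fragment looks like this:

<preprocessor class="com.tona.report.infra.TonaWizardProcessor"/>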

Using Oracle tkprof with a JDBC thin client application

While profiling a Hibernate-based application, I found a statement that was incredibly slow and caused the system to basically halt for a few seconds before resuming execution. I wanted to profile it at the database level, and the best tool for the job is Oracle’s own tkprof.
The input for tkprof is a session trace file, and enabling one is a bit tricky. The reason: a Java EE application, with multiple threads, has multiple connections and multiple database sessions, while SQL_TRACE works at the session level (I didn’t want to enable it for the entire database – the trace files would be totally unusable…).
So, I took the problematic code, ran it in a standalone Java application, and enabled SQL trace. Here’s how:

package com.tona.jdbc;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;

import oracle.jdbc.OracleCallableStatement;

public class TestJDBC {
	// Placeholder – replace with your own thin-driver connection string
	private static final String JDBC_CONNECTION_STRING = "jdbc:oracle:thin:user/password@//host:1521/SID";

	public static void main(String[] args) throws Exception {
		Connection c = DriverManager.getConnection(JDBC_CONNECTION_STRING);

		// Turn SQL_TRACE on for this session
		Statement stmt = c.createStatement();
		stmt.executeUpdate("ALTER SESSION SET SQL_TRACE=TRUE");
		stmt.close();

		// Set the SQL trace location (but see the note below)
		stmt = c.createStatement();
		stmt.executeUpdate("alter system set USER_DUMP_DEST='/tmp'");
		stmt.close();

		// JDBC logic comes here...

		c.close();
	}
}

Note
Changing the USER_DUMP_DEST parameter did not have any effect, and the SQL trace was written to the default trace directory on the server (in my case /u01/app/oracle/diag/rdbms/SID/SID/trace).
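
To turn the trace into a readable report, you need the trace file path and then tkprof. Here is a sketch of both steps – the trace file name below is only an example, since the real name depends on the instance name and server process id. The query (Oracle 11g and later) returns the exact path if executed on the same JDBC session, before closing the connection:

SELECT value FROM v$diag_info WHERE name = 'Default Trace File';

tkprof /u01/app/oracle/diag/rdbms/SID/SID/trace/SID_ora_12345.trc report.txt sys=no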

Batch Update with JPA

A customer of mine has a highly scalable system with high database load. To improve performance, we decided to use JDBC batch updates. Alas – the application uses JPA, so how do we do it?

SessionFactoryImplementor sfi = (SessionFactoryImplementor) entityManager.unwrap(Session.class).getSessionFactory();
// Retrieve a raw JDBC connection from Hibernate's connection provider
Connection connect = sfi.getConnectionProvider().getConnection();
PreparedStatement ps = connect.prepareStatement("INSERT INTO temp_table values(?,?)");
for (Data p : insertCommands) {
	ps.setInt(1, p.getId());
	if (p.isWorking() != null) {
		ps.setInt(2, p.isWorking() ? 1 : 0);
	} else {
		ps.setNull(2, Types.INTEGER);
	}
	ps.addBatch();
}
ps.executeBatch();

Some points to remember:

  1. Do not close the connection!!! If you close it, it returns to the connection pool without a commit or rollback. You’ll get some exceptions in your code, but mainly you’re exposed to database locks and deadlocks, since the connection keeps all the locks acquired during the batch update, and as other threads reuse the pooled connection, those locks may never be released. Close only the PreparedStatement – see the sketch after this list.
  2. Do not use Statement.addBatch. Use PreparedStatement.addBatch. When I checked both solutions, the Statement one took anywhere between 30s-60s. The PreparedStatement version took <300ms…
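
To make point 1 concrete, here is a minimal sketch of how I would wrap the batch from the snippet above – only the statement is closed, and the connection is left to JPA and the connection pool:

PreparedStatement ps = connect.prepareStatement("INSERT INTO temp_table values(?,?)");
try {
	// ... the setInt / setNull / addBatch loop from above ...
	ps.executeBatch();
} finally {
	// Close only the statement; never call connect.close() here
	ps.close();
}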

How to reuse a java.lang.StringBuilder

It’s common knowledge in Java that you want to clear a StringBuilder object rather than recreate it – mainly to avoid creating extra garbage for the GC to collect. But, surprisingly enough, there is no clear() method on the StringBuilder class, so we resort to manual clearing techniques. The ones I’ve seen are:

  • new StringBuilder which basically recreates the object
  • setLength(0) which changes the internal flags of the StringBuilder
  • delete(0,sb.length()) which deletes the whole buffer

One would think that the best technique is setLength – but I decided to put it to a quick test.
Here is the code I used:

package com.tona;

import java.util.Random;

public class TestStringBuffer {
	private static final int NUM_OF_RUNS = 100000;
	private static final int MAX_LENGTH = 1000;
	private static Random rand = new Random();

	private static int fillString(StringBuilder sb,boolean isSameLenString) {
		
		int length;
		
		if (isSameLenString)
			length = MAX_LENGTH;
		else 
			length = rand.nextInt(MAX_LENGTH);
		
		for (int i = 0; i < length; ++i) 
			sb.append("a");
		
		return length;
	}
	
	private void runBenchmark(boolean isSameLenString) {
		StringBuilder sb = new StringBuilder();
		long start = System.currentTimeMillis();

		for (int i = 0; i < NUM_OF_RUNS; ++i) {
			fillString(sb,isSameLenString);
			sb.setLength(0);
		}
		long end = System.currentTimeMillis();
		long diffWithLen0 = end - start;
		
		start = System.currentTimeMillis();

		for (int i = 0; i < NUM_OF_RUNS; ++i) {
			fillString(sb,isSameLenString);
			sb = new StringBuilder();
		}
		end = System.currentTimeMillis();
		long diffWithNew = end - start;

		start = System.currentTimeMillis();

		for (int i = 0; i < NUM_OF_RUNS; ++i) {
			fillString(sb,isSameLenString);
			sb = new StringBuilder(MAX_LENGTH);
		}
		end = System.currentTimeMillis();
		long diffWithNewConstLength = end - start;

		start = System.currentTimeMillis();

		for (int i = 0; i < NUM_OF_RUNS; ++i) {
			fillString(sb,isSameLenString);
			sb.delete(0, sb.length());
		}
		end = System.currentTimeMillis();
		long diffWithDelete = end - start;

		start = System.currentTimeMillis();

		for (int i = 0; i < NUM_OF_RUNS; ++i) {
			int length = fillString(sb,isSameLenString);
			sb.delete(0, length);
		}
		end = System.currentTimeMillis();
		long diffWithDeleteConstLength = end - start;

		System.out.println("With setLength(0) " + diffWithLen0);
		System.out.println("With new StringBuilder() " + diffWithNew);
		System.out.println("With new StringBuilder(MAX_LENGTH) " + diffWithNewConstLength);
		System.out.println("With delete(0, sb.length()) " + diffWithDelete);
		System.out.println("With delete(0, length) " + diffWithDeleteConstLength);
	}
	
	public static void main(String[] args) {
		TestStringBuffer test = new TestStringBuffer();
		System.out.println("Constant length string");
		test.runBenchmark(true);
		System.out.println("Changing length string");
		test.runBenchmark(false);
		
	}
}

And here are the results:

Constant length string
With setLength(0) 1524
With new StringBuilder() 1501
With new StringBuilder(MAX_LENGTH) 1365
With delete(0, sb.length()) 1369
With delete(0, length) 1391
Changing length string
With setLength(0) 686
With new StringBuilder() 743
With new StringBuilder(MAX_LENGTH) 796
With delete(0, sb.length()) 715
With delete(0, length) 698

(Note that the varying-length test uses a Random, so results may vary.)
For the fixed-length strings, new StringBuilder(MAX_LENGTH) with a predefined capacity is the best option, with the delete method only marginally behind it. For the varying-length strings, setLength and delete are more or less on par (give it a few runs and you’ll see for yourself).
So, now you know how to clear your StringBuilder object. Enjoy.

Improving authorization performance in Liferay

I have recently run a benchmark on a Liferay 6.1 portal. Most of the bottlenecks were the result of the portlets themselves, but a major contributor was Liferay’s authorization mechanism.
Liferay authorization works by performing a permission check for every component on the screen. That makes sense in itself, but the default implementation is a bit naive – it goes to the database to check whether the user has the required permission, and since rights can be inherited, each check can hit the database multiple times.
So, I set out to solve this irritating issue with a cache. The cache is naive too – if a user is granted a new role, a server restart is needed – but the implementation is easy to extend.

  1. Create a new class that extends com.liferay.portal.security.permission.AdvancedPermissionChecker.
  2. Open the portal-ext.properties file.
  3. Add the property permissions.checker={your class name} to it (see the example entry after this list).
  4. The class code should look like this:
    package com.tona.liferay.permission;

    import java.util.HashMap;
    import java.util.Map;

    public class TonaPermissionChecker extends
            com.liferay.portal.security.permission.AdvancedPermissionChecker {

        // Naive, application-wide cache: entries are never invalidated, so a
        // restart is needed after permission changes (as noted above)
        private static Map<PermissionKey, Boolean> permissionCache = new HashMap<PermissionKey, Boolean>();

        public boolean hasPermission(long groupId, String name, String primKey,
                String actionId) {

            PermissionKey key = new PermissionKey(groupId, name, primKey, actionId);
            if (permissionCache.containsKey(key)) {
                return permissionCache.get(key);
            }

            boolean result = super.hasPermission(groupId, name, primKey, actionId);
            permissionCache.put(key, result);

            return result;

        }

        private class PermissionKey {
            private long groupId;
            private String name;
            private String primKey;
            private String actionId;

            public PermissionKey(long groupId, String name, String primKey,
                    String actionId) {
                super();
                this.groupId = groupId;
                this.name = name;
                this.primKey = primKey;
                this.actionId = actionId;
            }

            public long getGroupId() {
                return groupId;
            }

            public void setGroupId(long groupId) {
                this.groupId = groupId;
            }

            public String getName() {
                return name;
            }

            public void setName(String name) {
                this.name = name;
            }

            public String getPrimKey() {
                return primKey;
            }

            public void setPrimKey(String primKey) {
                this.primKey = primKey;
            }

            public String getActionId() {
                return actionId;
            }

            public void setActionId(String actionId) {
                this.actionId = actionId;
            }

            @Override
            public int hashCode() {
                final int prime = 31;
                int result = 1;
                result = prime * result
                        + ((actionId == null) ? 0 : actionId.hashCode());
                result = prime * result + (int) (groupId ^ (groupId >>> 32));
                result = prime * result + ((name == null) ? 0 : name.hashCode());
                result = prime * result
                        + ((primKey == null) ? 0 : primKey.hashCode());
                return result;
            }

            @Override
            public boolean equals(Object obj) {
                if (this == obj)
                    return true;
                if (obj == null)
                    return false;
                if (getClass() != obj.getClass())
                    return false;
                PermissionKey other = (PermissionKey) obj;
                if (actionId == null) {
                    if (other.actionId != null)
                        return false;
                } else if (!actionId.equals(other.actionId))
                    return false;
                if (groupId != other.groupId)
                    return false;
                if (name == null) {
                    if (other.name != null)
                        return false;
                } else if (!name.equals(other.name))
                    return false;
                if (primKey == null) {
                    if (other.primKey != null)
                        return false;
                } else if (!primKey.equals(other.primKey))
                    return false;
                return true;
            }
        }
    }

  5. Package the class in a JAR file and put it in TOMCAT_HOME/webapps/ROOT/WEB-INF/lib.
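
For reference, with the class above, the portal-ext.properties entry from step 3 would be:

permissions.checker=com.tona.liferay.permission.TonaPermissionChecker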

Tunneling with JProfiler

I had some issues running JProfiler against a remote machine. I had JBoss running on a remote Linux server, and for some reason, X Windows just didn’t work. It turned out I had to tunnel the JProfiler connection over SSH, and luckily – that’s easy to do.
I ran the following command on my laptop (running Linux Mint):

ssh -f root@XXX.XXX.XXX.XXX -L 2000:localhost:8849 -N

Now all I have to do is open a connection from JProfiler: I choose “attach to remote process”, select localhost, port 2000, and that’s it – I can profile the remote server.