Dec 8, 2012

In OIM 11g, entity adapters can no longer be attached to the user form, so you have to re-implement them as event handlers. I recently had a requirement to do exactly that. The upside is that you can combine multiple operations into a single event handler. In this post I am going to share the code for one pre-process and one post-process event handler.
  1. The pre-process event handler is going to generate the user login based upon the user type.
  2. The post process is going to generate email id for the user if one is not entered initially.
The pre-process event handler code is shown in the snippet below.
package com.blogspot.ramannanda.oim.handlers;

import java.io.Serializable;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.HashMap;
import oracle.adf.share.logging.ADFLogger;
import oracle.iam.identity.usermgmt.api.UserManagerConstants;
import oracle.iam.platform.Platform;
import oracle.iam.platform.context.ContextAware;
import oracle.iam.platform.kernel.spi.PreProcessHandler;
import oracle.iam.platform.kernel.vo.AbstractGenericOrchestration;
import oracle.iam.platform.kernel.vo.BulkEventResult;
import oracle.iam.platform.kernel.vo.BulkOrchestration;
import oracle.iam.platform.kernel.vo.EventResult;
import oracle.iam.platform.kernel.vo.Orchestration;

import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition;

public class GenerateLoginID implements PreProcessHandler {
    private ADFLogger eventLogger =
        ADFLogger.createADFLogger(GenerateLoginID.class);
    private PlatformTransactionManager txManager =
        Platform.getPlatformTransactionManager();

    public GenerateLoginID() {
        super();
    }

    private String getParamaterValue(HashMap<String, Serializable> parameters,
                                     String key) {
        String value =
            (parameters.get(key) instanceof ContextAware) ? (String)((ContextAware)parameters.get(key)).getObjectValue() :
            (String)parameters.get(key);
        return value;
    }

    /**
     * This method is used to populate the user login.
     * @param processId the process id
     * @param eventId the event id
     * @param orchestration the orchestration containing the user attributes
     * @return the event result
     */
    public EventResult execute(long processId, long eventId,
                               Orchestration orchestration) {
        String methodName =
            Thread.currentThread().getStackTrace()[1].getMethodName();
        eventLogger.entering(methodName,
                             "params :[" + processId + "," + eventId + "]");
        HashMap<String, Serializable> map = orchestration.getParameters();
        String employeeType =
            getParamaterValue(map, UserManagerConstants.AttributeName.EMPTYPE.getId());
        eventLogger.info("[" + methodName + "]  Employee Type " +
                         employeeType);
        String userId = generateEMPID(employeeType);
        System.out.println("[" + methodName + "]  got user Id " + userId);
        eventLogger.info("[" + methodName + "]   got user Id " + userId);
        map.put("User Login", userId);
        //generate home directory here we are updating another attribute
        map.put("Home Directory", "/home/" + userId);
        EventResult result = new EventResult();

        return result;
    }


    /**
     * Bulk version of the handler, invoked for bulk orchestrations.
     * @param processId the process id
     * @param eventId the event id
     * @param bulkOrchestration the bulk orchestration containing the parameters for each user
     * @return the bulk event result
     */
    public BulkEventResult execute(long processId, long eventId,
                                   BulkOrchestration bulkOrchestration) {
        HashMap<String, Serializable>[] params =
            bulkOrchestration.getBulkParameters();
        for (int i = 0; i < params.length; i++) {
            HashMap<String, Serializable> orchParam = params[i];
            String employeeType =
                getParamaterValue(orchParam, UserManagerConstants.AttributeName.EMPTYPE.getId());
            String userId = generateEMPID(employeeType);
            orchParam.put(UserManagerConstants.AttributeName.USER_LOGIN.getId(),
                          userId);
            //custom attribute
            orchParam.put("Home Directory", "/home/" + userId);
        }

        return new BulkEventResult();
    }


    public void compensate(long l, long l1,
                           AbstractGenericOrchestration abstractGenericOrchestration) {
    }

    public boolean cancel(long l, long l1,
                          AbstractGenericOrchestration abstractGenericOrchestration) {
        return false;
    }

    public void initialize(HashMap<String, String> hashMap) {
    }

    private String generateEMPID(String employeeType) {
        Long Id = null;
        String methodName =
            Thread.currentThread().getStackTrace()[1].getMethodName();
        Connection con = null;
        Statement st = null;
        ResultSet rs = null;
        TransactionStatus txStatus =
            txManager.getTransaction(new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRES_NEW));
        boolean rollback = false;
        try {
            con = Platform.getOperationalDS().getConnection();
            st = con.createStatement();
            eventLogger.info("[" + methodName + "] Before Executing query");

            if (employeeType.equalsIgnoreCase("Full-Time")) {
                rs = st.executeQuery("select USERID_FT.nextval from dual");
            } else if (employeeType.equalsIgnoreCase("Temp")) {
                rs = st.executeQuery("select USERID_TMP.nextval from dual");
            } else if (employeeType.equalsIgnoreCase("Consultant")) {
                rs = st.executeQuery("select USERID_CONS.nextval from dual");
            }
            //other conditions here
            // rs stays null if the employee type matched none of the conditions above
            if (rs != null && rs.next()) {
                Id = rs.getLong(1);
            }
        } catch (Exception e) {
            rollback = true;
            eventLogger.severe("[" + methodName +
                               "] Error occured in execution" +
                               e.getMessage());

        } finally {
            try {
                if (rs != null) {
                    rs.close();
                }
                if (st != null) {
                    st.close();
                }
                if (con != null) {
                    con.close();
                }
                if (rollback)
                    txManager.rollback(txStatus);
                else
                    txManager.commit(txStatus);
            } catch (Exception e) {
                eventLogger.severe("[" + methodName +
                                   "] Error occured in execution" +
                                   e.getMessage());
            }
        }
        if (Id != null) {
            return Id.toString();
        } else
            return null;


    }
}


As this is a pre-process event handler, all you have to do is place the new attribute value in the orchestration parameter map and it will be saved in the user profile. The generateEMPID method uses the platform transaction manager API to fetch the next value from a database sequence in its own transaction.
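The generateEMPID method assumes that per-employee-type sequences already exist in the database it queries. If you follow this approach you would create them beforehand with something like the following (the sequence names are taken from the code above; the start value is just an example):

create sequence USERID_FT start with 1000 increment by 1;
create sequence USERID_TMP start with 1000 increment by 1;
create sequence USERID_CONS start with 1000 increment by 1;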


The snippet for the post-process event handler is shown below.


package com.blogspot.ramannanda.oim.handlers;

import java.io.Serializable;

import java.util.HashMap;

import oracle.adf.share.logging.ADFLogger;

import oracle.iam.identity.usermgmt.api.UserManagerConstants;
import oracle.iam.platform.Platform;
import oracle.iam.platform.context.ContextAware;
import oracle.iam.platform.entitymgr.EntityManager;
import oracle.iam.platform.kernel.spi.PostProcessHandler;
import oracle.iam.platform.kernel.vo.AbstractGenericOrchestration;
import oracle.iam.platform.kernel.vo.BulkEventResult;
import oracle.iam.platform.kernel.vo.BulkOrchestration;
import oracle.iam.platform.kernel.vo.EventResult;
import oracle.iam.platform.kernel.vo.Orchestration;

public class GenerateEmailId implements PostProcessHandler{
    private ADFLogger eventLogger=ADFLogger.createADFLogger(GenerateEmailId.class);

    public GenerateEmailId() {
        super();
    }
    /**
     * Gets the parameter value from the parameter map.
     * @param parameters the orchestration parameter map
     * @param key the attribute key
     * @return the parameter value as a string
     */
    private String getParamaterValue(HashMap<String, Serializable> parameters, String key) {
      String value = (parameters.get(key) instanceof ContextAware)
      ? (String) ((ContextAware) parameters.get(key)).getObjectValue()
      : (String) parameters.get(key);
      return value;
    }
    /**
     *
     * This method is used to populate the value for email id based on first name and last name
     * @param processId
     * @param eventId
     * @param orchestration
     * @return
     */
    public EventResult execute(long processId, long eventId,
                               Orchestration orchestration) {
        String methodName =
            Thread.currentThread().getStackTrace()[1].getMethodName();
        eventLogger.entering(methodName, "params :["+processId+","+eventId+"]");
        EntityManager mgr = Platform.getService(EntityManager.class);
        HashMap<String, Serializable> map = orchestration.getParameters();
        String email = getParamaterValue(map, UserManagerConstants.AttributeName.EMAIL.getId());
        if (email == null || email.isEmpty()) {
            String firstName = getParamaterValue(map, UserManagerConstants.AttributeName.FIRSTNAME.getId());
            String lastName = getParamaterValue(map, UserManagerConstants.AttributeName.LASTNAME.getId());
            String generatedEmail = generateEmail(firstName, lastName);
            HashMap<String, Object> modifyMap = new HashMap<String, Object>();
            modifyMap.put(UserManagerConstants.AttributeName.EMAIL.getId(), generatedEmail);
            try {
                mgr.modifyEntity(orchestration.getTarget().getType(),
                                 orchestration.getTarget().getEntityId(),
                                 modifyMap);
            } catch (Exception e) {
                eventLogger.severe("[" + methodName +
                                   "] Error occurred in updating user " +
                                   e.getMessage());
            }
        }
        // asynchronous event handlers must return null
        return null;
    }
    /**
     * Generates email 
     * @param firstName
     * @param lastName
     * @return
     */
    private String generateEmail(String firstName, String lastName) {
        return null;
    }

    public BulkEventResult execute(long l, long l1,
                                   BulkOrchestration bulkOrchestration) {
        return null;
    }

    public void compensate(long l, long l1,
                           AbstractGenericOrchestration abstractGenericOrchestration) {
    }

    public boolean cancel(long l, long l1,
                          AbstractGenericOrchestration abstractGenericOrchestration) {
        return false;
    }

    public void initialize(HashMap<String, String> hashMap) {
    }
}


Note that post-process event handlers registered as asynchronous (sync="FALSE") must return null as the event result. The generateEmail method above is just a stub and is not actually implemented.
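For completeness, here is a minimal sketch of what generateEmail could look like. The firstname.lastname format and the example.com domain are assumptions, not part of the original code, and a real implementation would also have to handle collisions with existing users:

    private String generateEmail(String firstName, String lastName) {
        // assumed convention: firstname.lastname@example.com
        if (firstName == null || firstName.isEmpty() || lastName == null ||
            lastName.isEmpty()) {
            return null;
        }
        return (firstName + "." + lastName + "@example.com").toLowerCase();
    }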


The event handler XML that you need to import into MDS is shown below.


<?xml version="1.0" encoding="UTF-8"?>
<eventhandlers xmlns="http://www.oracle.com/schema/oim/platform/kernel" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.oracle.com/schema/oim/platform/kernel orchestration-handlers.xsd">
   <action-handler class="com.blogspot.ramannanda.oim.handlers.GenerateLoginID" entity-type="User" operation="CREATE" name="GenerateLoginID" stage="preprocess" order="1000" sync="FALSE"/>
   <action-handler class="com.blogspot.ramannanda.oim.handlers.GenerateEmailId" entity-type="User" operation="CREATE" name="GenerateEmailId" stage="postprocess" order="1001" sync="FALSE"/>
</eventhandlers>
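Note that before this XML can take effect, the handler classes have to be packaged and registered as an OIM plugin. A minimal plugin.xml sketch for the plugin zip might look like this (the version values are placeholders):

<?xml version="1.0" encoding="UTF-8"?>
<oimplugins>
   <plugins pluginpoint="oracle.iam.platform.kernel.spi.EventHandler">
      <plugin pluginclass="com.blogspot.ramannanda.oim.handlers.GenerateLoginID" version="1.0" name="GenerateLoginID"/>
      <plugin pluginclass="com.blogspot.ramannanda.oim.handlers.GenerateEmailId" version="1.0" name="GenerateEmailId"/>
   </plugins>
</oimplugins>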

Now for the logging part: in the snippets I have used the ADFLogger implementation. All you need to do to use it is add a logger named com.blogspot to the OIM server logging configuration; you can also add a log handler to send the logging output to a separate file.
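For example, a logger entry along these lines in the managed server's logging.xml would do (a sketch; the handler name depends on your installation):

<logger name="com.blogspot" level="TRACE:1" useParentHandlers="false">
   <handler name="odl-handler"/>
</logger>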

Posted on Saturday, December 08, 2012 by Unknown

I had recently installed the newest version of Songbird, but found that the equalizer preset add-on could not be added to it because a newer version of the add-on was not available. The workaround is described below.

  1. Download the equalizer preset plug-in from the Songbird add-ons site.
  2. Open it with 7-Zip or another archive tool.
  3. Open the install.rdf present in the archive and, in the Songbird section, edit em:maxVersion to 2.1.0 (see the sketch after this list).
  4. Install the add-on manually by clicking install, selecting the add-on file from the file system, and restarting Songbird.
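For reference, the edited block inside install.rdf looks roughly like this (a sketch; the em:id shown for Songbird is illustrative, keep whatever value is already in the file):

<em:targetApplication>
  <Description>
    <em:id>songbird@songbirdnest.com</em:id>
    <em:minVersion>0.2</em:minVersion>
    <em:maxVersion>2.1.0</em:maxVersion>
  </Description>
</em:targetApplication>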

Posted on Saturday, December 08, 2012 by Unknown

Sep 29, 2012

If you have a requirement for doing code coverage in an ADF application, you can easily accomplish it using EMMA. The good thing about EMMA is that it supports offline instrumentation, which helps in collecting and merging data from multiple runs; with offline instrumentation you are also not affected by the runtime class-loading behaviour of the application container. In brief, EMMA adds byte-code instrumentation to the compiled classes; after you run your use cases, you merge the runtime coverage data with the class metadata and generate a report on the code coverage achieved during testing.
The steps to accomplish it for your ADF app are simple.
  1. Create an Ant build file for each of your projects, i.e. one for your model project and another for your view-controller project. Make sure that you select the ojdeploy tasks while doing so for the view-controller project.
  2. Now add the EMMA Ant tasks for generating the class metadata for each of your projects. The following snippet shows the relevant Ant tasks for the ViewController project.
    <property file="build1.properties"/>
        <path id='emma.lib' >
        <pathelement location="${emma.dir}/emma.jar"/>
        <pathelement location="${emma.dir}/emma_ant.jar"/>
      </path>
       <taskdef resource="emma_ant.properties" classpathref="emma.lib"/>
     <target name="init">
        <tstamp/>
        <mkdir dir="${output.dir}"/>
         <mkdir dir="${coverage.dir}"/>
        <mkdir dir="${instrumentation.dir}"/>
      </target>
    <target name="emma" description="turns on EMMA instrumentation/reporting" >
        <!-- EMMA instr class output directory: -->
         <property name="emma.enabled" value="true" />
        <property name="out.instr.dir" value="outinstr" />
        <mkdir dir="${out.instr.dir}" />
    </target>
    <target name="emmabuild" depends="clean,init,compile,emma,copy" description="runs the examples" >
        <emma  enabled="${emma.enabled}"  >
        <instr merge="yes" filter="com.blogspot.*"  destdir="${instrumentation.dir}" instrpath="${output.dir}" mode="copy"/>
     </emma>
     </target>
      <target name="instrumentView" description="Copy instrumented classes to output directory"
              depends="emmabuild">
              
        <patternset id="copy1.patterns">
        <include name="**/*.class"/>
        </patternset>
           <copy todir="${output.dir}">
          <fileset dir="${instrumentation.dir}">
            <patternset refid="copy1.patterns"/>
          </fileset>
          </copy>
      </target>

    As can be seen from the above snippet, EMMA provides filtering capabilities so that you can select which classes should be part of the instrumented build.


  3. The following snippet shows the relevant part of the ojdeploy task.
    <target name="deploy" description="Deploy JDeveloper profiles"
              depends="instrumentView">
    .........
    ..........
    
     <ora:deploy>
        <ora:parameter name="workspace"
                       value="${oracle.jdeveloper.workspace.path}"/>
        <ora:parameter name="nodatasources" value="true"/>
        <ora:parameter name="forcerewrite" value="true"/>
        <ora:parameter name="profile"
                       value="${oracle.jdeveloper.deploy.profile.name}"/>
        <ora:parameter name="nocompile" value="true"/>
        <ora:parameter name="outputfile"
                       value="${oracle.jdeveloper.deploy.outputfile}"/>
     </ora:deploy>
    </target>

    Notice that the nocompile option is set to true because we do not want the ojdeploy task to recompile and overwrite the instrumented classes.


  4. Also, I have added the following snippet to the copy task so that the page definitions are also included in the EAR that is going to be generated:
    <fileset dir="adfmsrc">
      <patternset refid="copy.patterns"/>
    </fileset>



  5. To build the project, run the instrumentation Ant target (instrumentModel for the model project) followed by the deploy task, and you will have an EAR file containing the instrumented build. You will also have a class metadata file, coverage.em, generated for each of your projects (rename one of them to coverage1.em).


  6. Place only emma.jar into the lib/ext directory of the JRE used by your WebLogic server.


  7. Start the WebLogic server, deploy the generated EAR file, run through a flow, and shut down the server. The server logs will show the location of the generated runtime coverage file (coverage.ec).


  8. Place all three files in one directory (just for convenience) and run the following command to generate HTML and text reports showing the code coverage achieved while executing your flow.



    java emma report -report html,txt -in "C:/emmareport/coverage.em" -in "C:/emmareport/coverage1.em" -in "C:/emmareport/coverage.ec" -Dreport.html.out.file="C:/emmareport/coverage.html" 

    Sample images of the report are shown below.


The sample application can be downloaded from here.

Posted on Saturday, September 29, 2012 by Unknown

May 21, 2012

I have created a custom faces component for previewing uploaded images/Flash files. It is a facelet component and the implementation is very crude, but I hope the product team can come up with something better, as other frameworks like RichFaces/PrimeFaces do have this kind of component. The component does not have any renderer registered with it; everything is handled in the component class.

The component renders markup based on the content type of the file. It then generates a URL served by a registered servlet, which writes the file data back to the response stream. There are a couple of things to note: 1) the values are stored in the session, so for large files this will be an issue; a better implementation could store the file in a temporary location and retrieve it from there. 2) It only handles image and Flash previews, but it could be extended to preview text, JS, or PDF files.

The component class below extends UIXOutput so as to provide the partialTriggers attribute for partial update of the component tree.

package com.blogspot.ramannanda.components;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;
import javax.faces.context.FacesContext;
import javax.faces.context.ResponseWriter;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.apache.myfaces.trinidad.component.UIXOutput;


/**
 * @author Ramandeep Nanda
 */
public class RichPreviewOutput extends UIXOutput{
    public RichPreviewOutput() {
        super();
        this.setRendererType(null);
    }


    @Override
    public void encodeEnd(FacesContext facesContext) throws IOException {
        super.encodeEnd(facesContext);
        String elementType = (String)this.getAttributes().get("elementType");
        if (elementType != null) {
            if (elementType.contains("image")) {
                encodeImage(facesContext);
            } else if (elementType.contains("application/x-shockwave-flash")) {
                encodeFlash(facesContext);
            }
        }
    }


    private void encodeImage(FacesContext facesContext) throws IOException {
        ResponseWriter writer = facesContext.getResponseWriter();
        writer.startElement("img", this);
        String fileName = (String)getAttributes().get("filename");
        String uri = generateResourceUrl(fileName);
        String uriAttr = (String)this.getAttributes().get("uriAttr");
        String contentType = (String)this.getAttributes().get("elementType");
        writer.writeAttribute(uriAttr, uri, uriAttr);
        String style = (String)getAttributes().get("style");
        if (style == null) {
            writer.writeAttribute("style", "width:100px;height:100px", "style");
        } else {
            writer.writeAttribute("style", style, "style");
        }
        // stash the file data in the session so the resource servlet can serve it
        byte[] data = (byte[])getAttributes().get("data");
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("data", data);
        map.put("contenttype", contentType);
        HttpSession session =
            (HttpSession)facesContext.getExternalContext().getSession(false);
        session.setAttribute(fileName, map);
        writer.endElement("img");
    }

    private String generateResourceUrl(String fileName) {
        HttpServletRequest request =
            (HttpServletRequest)FacesContext.getCurrentInstance().getExternalContext().getRequest();
        String resPath =
            FacesContext.getCurrentInstance().getExternalContext().getInitParameter("ServletPath");
        // strip the /faces part of the request URL and append the servlet path
        String ctxtPath = request.getRequestURL().toString();
        ctxtPath = ctxtPath.substring(0, ctxtPath.lastIndexOf("/faces"));
        ctxtPath = ctxtPath + resPath + "?fileName=" + fileName;

        String returnURI = null;
        try {
            URI uri = new URI(ctxtPath);
            returnURI = uri.toURL().toString();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        } catch (MalformedURLException e) {
            e.printStackTrace();
        }
        return returnURI;
    }


    private void encodeFlash(FacesContext facesContext) throws IOException {
        ResponseWriter writer = facesContext.getResponseWriter();
        writer.startElement("object", this);
        String fileName = (String)getAttributes().get("filename");
        String uri = generateResourceUrl(fileName);
        String uriAttr = (String)this.getAttributes().get("uriAttr");
        String contentType = (String)this.getAttributes().get("elementType");
        writer.writeAttribute(uriAttr, uri, uriAttr);
        writer.writeAttribute("type", contentType, "elementType");
        String style = (String)getAttributes().get("style");
        if (style == null) {
            writer.writeAttribute("style", "width:100px;height:100px", "style");
        } else {
            writer.writeAttribute("style", style, "style");
        }
        // "movie" is the standard param name understood by the flash plugin
        writer.startElement("param", this);
        writer.writeAttribute("name", "movie", "param name");
        writer.writeAttribute("value", uri, "uri");
        writer.endElement("param");
        // fallback link for browsers without the flash plugin
        writer.startElement("a", this);
        writer.writeAttribute("href", "http://www.adobe.com/go/getflash", "href");
        writer.writeText("Get Flash", null);
        writer.endElement("a");
        writer.endElement("object");
        // stash the file data in the session so the resource servlet can serve it
        byte[] data = (byte[])getAttributes().get("data");
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("data", data);
        map.put("contenttype", contentType);
        HttpSession session =
            (HttpSession)facesContext.getExternalContext().getSession(false);
        session.setAttribute(fileName, map);
    }
}


The tag attributes and usage is shown below.

<f:view xmlns:f="http://java.sun.com/jsf/core"
        xmlns:h="http://java.sun.com/jsf/html"
        xmlns:preview="com.blogspot.ramannanda.components">
  <document title="untitled1.jsf" id="d1"
            xmlns="http://xmlns.oracle.com/adf/faces/rich">
    <form id="f1" usesUpload="true">
      <panelGroupLayout id="pgl1">
        <inputFile value="#{pageFlowScope.TestUploadBacking.file}" label="Upload" id="if1"/>
        <preview:richpreview data="#{pageFlowScope.TestUploadBacking.data.data}"
                             style="width:100px;height:100px"
                             elementType="#{pageFlowScope.TestUploadBacking.data.elementType}"
                             filename="#{pageFlowScope.TestUploadBacking.data.fileName}"
                             uriAttr="#{pageFlowScope.TestUploadBacking.data.uriAttr}"
                             value="#{pageFlowScope.TestUploadBacking.data.value}"/>
        <commandButton text="Submit"
                       action="#{pageFlowScope.TestUploadBacking.uploadAction}" id="cb1"/>
      </panelGroupLayout>
    </form>
  </document>
</f:view>


Here data is a POJO class.
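For reference, a minimal sketch of such a POJO, with field names inferred from the EL expressions above (the class name is hypothetical):

package com.blogspot.ramannanda.model;

// simple holder for the uploaded file's preview attributes
public class PreviewData {
    private byte[] data;        // raw file contents
    private String elementType; // content type, e.g. image/png
    private String fileName;
    private String uriAttr;     // "src" for images, "data" for flash
    private String value;

    public byte[] getData() { return data; }
    public void setData(byte[] data) { this.data = data; }
    public String getElementType() { return elementType; }
    public void setElementType(String elementType) { this.elementType = elementType; }
    public String getFileName() { return fileName; }
    public void setFileName(String fileName) { this.fileName = fileName; }
    public String getUriAttr() { return uriAttr; }
    public void setUriAttr(String uriAttr) { this.uriAttr = uriAttr; }
    public String getValue() { return value; }
    public void setValue(String value) { this.value = value; }
}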

The attributes of the richpreview tag and their meaning are explained below.
elementType - The content type of the file. This attribute is required.
uriAttr - The URI attribute to write. It is either src or data depending on whether the file is an image or a flash file: src for an image, data for flash. This attribute is required.
data - The actual file data as a byte array. It is required.
filename - The name of the file. It is required.

Wherever you use the tag, the resource servlet must be registered along with an init param, as shown below. The ServletPath context param has to match the servlet's url-pattern.

<context-param>
  <param-name>ServletPath</param-name>
  <param-value>/myservlet</param-value>
</context-param>

<servlet>
  <servlet-name>MyResourceServlet</servlet-name>
  <servlet-class>com.blogspot.ramannanda.DynamicResourceServlet</servlet-class>
</servlet>
<servlet-mapping>
  <servlet-name>MyResourceServlet</servlet-name>
  <url-pattern>/myservlet</url-pattern>
</servlet-mapping>
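The DynamicResourceServlet itself ships with the downloadable project below; a minimal sketch of what it has to do (look up the map the component stored in the session and stream the bytes back) is shown here:

package com.blogspot.ramannanda;

import java.io.IOException;
import java.io.OutputStream;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

public class DynamicResourceServlet extends HttpServlet {
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
        String fileName = request.getParameter("fileName");
        // the component stored a map with "data" and "contenttype" under the file name
        Map map = (Map)request.getSession().getAttribute(fileName);
        if (map == null) {
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
            return;
        }
        byte[] data = (byte[])map.get("data");
        response.setContentType((String)map.get("contenttype"));
        response.setContentLength(data.length);
        OutputStream out = response.getOutputStream();
        out.write(data);
        out.flush();
    }
}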

The entire source code for the tag library and a sample usage project can be downloaded from the links below.

Tag library project
Sample usage
Tag library


Posted on Monday, May 21, 2012 by Unknown

Apr 23, 2012

This is a short post about how one can use ADF security to hide/show links to task flows depending upon the user's role. We can use either the userInRole or the taskflowViewable EL expression on the visible or rendered property of the af:commandLink. However, choosing one over the other has repercussions for maintainability and can replicate security configuration in the source code. The preferred way, I think, is to use the taskflowViewable expression, as it avoids the aforementioned problems. This can be seen from the sample usage of both expressions below.
1. taskflowViewable :-
<af:commandLink id="cl1" rendered="#{securityContext.taskflowViewable['/pathtotaskflowdefinition/#taskflow_defn_id']}" >
  <af:setPropertyListener type="action"   from="/pathtotaskflowdefinition/#taskflow_defn_id" to="#{viewScope.NavBacking.taskFlowId}" />
              </af:commandLink>

2. userInRole :-
<af:commandLink id="cl1" rendered="#{securityContext.userInRole['abcRole','xyzRole']}">
  <af:setPropertyListener type="action" from="/pathtotaskflowdefinition/#taskflow_defn_id" to="#{viewScope.NavBacking.taskFlowId}"/>
</af:commandLink>
 

As you can see, the first expression simply restricts access implicitly for users who have not been granted access to the particular task flow in the jazn-data.xml file, whereas the second expression does so explicitly by naming two specific application roles, which causes replication and maintenance issues.

By explicitly specifying which roles can access the task flow, the second snippet replicates the security configuration made in jazn-data.xml. If a need arises to grant or revoke access to the task flow, the second scenario requires a code change along with merging the security policy, whereas in the former case the only thing required is merging the security policy. Hence you should prefer taskflowViewable over userInRole.
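For context, a task-flow grant in jazn-data.xml looks roughly like this (role and path names below are placeholders):

<grant>
  <grantee>
    <principals>
      <principal>
        <class>oracle.security.jps.service.policystore.ApplicationRole</class>
        <name>abcRole</name>
      </principal>
    </principals>
  </grantee>
  <permissions>
    <permission>
      <class>oracle.adf.controller.security.TaskFlowPermission</class>
      <name>/pathtotaskflowdefinition/#taskflow_defn_id</name>
      <actions>view</actions>
    </permission>
  </permissions>
</grant>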

Posted on Monday, April 23, 2012 by Unknown

Apr 4, 2012

There is a caveat I wanted to highlight with respect to view criteria that has a performance impact.
When you use a view criteria, it is applied on top of the original query block, as shown below.
This example is based on the SCOTT schema.
select * from (select empno from emp) QRSLT where empno=2;
For this simple case, the query execution plan will be the same as that of the following query, where you filter the records inside the query rather than via a view criteria.
select empno from emp where empno=:bvar;
The plan is shown below :-

PLAN_TABLE_OUTPUT
--------------------------------------------------------------------------------
Plan hash value: 56244932

----------------------------------------------------------------------------
| Id  | Operation         | Name   | Rows  | Bytes | Cost (%CPU)| Time     |
----------------------------------------------------------------------------
|   0 | SELECT STATEMENT  |        |     1 |     4 |     0   (0)| 00:00:01 |
|*  1 |  INDEX UNIQUE SCAN| PK_EMP |     1 |     4 |     0   (0)| 00:00:01 |
----------------------------------------------------------------------------

Predicate Information (identified by operation id):
---------------------------------------------------

   1 - access("EMPNO"=TO_NUMBER(:BVAR))

But the fact that the ADF view criteria WHERE clause is applied outside the main query block can have a huge performance impact in case you end up using an analytic function inside your query block, as shown below.

select * from (select empno,row_number() over (order by 1) from emp) QRSLT where empno=2;

The query plan is shown below
PLAN_TABLE_OUTPUT
--------------------------------------------------------------------------------
Plan hash value: 1674134785

----------------------------------------------------------------------------
| Id  | Operation         | Name   | Rows  | Bytes | Cost (%CPU)| Time     |
----------------------------------------------------------------------------
|   0 | SELECT STATEMENT  |        |    14 |   364 |     1   (0)| 00:00:01 |
|*  1 |  VIEW             |        |    14 |   364 |     1   (0)| 00:00:01 |
|   2 |   WINDOW NOSORT   |        |    14 |    56 |     1   (0)| 00:00:01 |
|   3 |    INDEX FULL SCAN| PK_EMP |    14 |    56 |     1   (0)| 00:00:01 |
----------------------------------------------------------------------------

Predicate Information (identified by operation id):
---------------------------------------------------

   1 - filter("Q1"."EMPNO"=2)

Without view criteria

The query is shown below :-
select empno,row_number() over (order by 1) from emp where empno=:bvar

The execution plan :-
PLAN_TABLE_OUTPUT
-----------------------------------------------------------------------------------
Plan hash value: 1807209526

-----------------------------------------------------------------------------
| Id  | Operation          | Name   | Rows  | Bytes | Cost (%CPU)| Time     |
-----------------------------------------------------------------------------
|   0 | SELECT STATEMENT   |        |     1 |     4 |     0   (0)| 00:00:01 |
|   1 |  WINDOW NOSORT     |        |     1 |     4 |     0   (0)| 00:00:01 |
|*  2 |   INDEX UNIQUE SCAN| PK_EMP |     1 |     4 |     0   (0)| 00:00:01 |
-----------------------------------------------------------------------------

Predicate Information (identified by operation id):
---------------------------------------------------

   2 - access("EMPNO"=TO_NUMBER(:BVAR))

Now this plan is far better, as the rows are filtered first and only then is the analytic function row_number() applied. In the plan output for the view-criteria version above, you can see that all 14 rows need to be accessed and a full index scan (visiting all the index blocks) takes place.

The plan output differs because whenever you use an analytic function or a rownum clause inside your query block, the query optimizer will not push predicates into the inner query block, since doing so could affect the result set returned by the query. In the former case the query optimizer does push the predicate inside, hence the execution plans are the same.

Hence, if you are using analytic functions or a rownum clause in your query, be aware of the performance impact and always filter the rows inside the query block.

Note: Also, don't choose to ignore null values for predicates that are required, and create proper indexes on the table depending upon how you filter results. For example, if the predicate is upper(ename)=upper(:bvar), then create a function-based index on upper(ename).

Posted on Wednesday, April 04, 2012 by Unknown

Mar 7, 2012

In this post I discuss a simple workaround for the fact that ADF 11g does not expose an explicit window-close event. The workaround is to use a library like Dojo or jQuery; using such a library makes the function cross-browser compatible.

The code below logs the user out when they close the browser window. It calls the unLoad function, which makes use of the dojo.io.script.get function (this Dojo function also allows cross-site data access using callbacks).
<script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/dojo/1.4/dojo/dojo.xd.js" ></script>
<script type="text/javascript">
dojo.require("dojo.io.script");
var unLoad = function() {
    dojo.io.script.get({
    url:'http://127.0.0.1:7101/myapp/adfAuthentication?logout=true',
    timeout:15000
    });
 
 
  }
  dojo.addOnWindowUnload(unLoad);
</script>

Add this code to the fragment or the jspx page. Also note that if you are adding it to a fragment, make sure to add the script inside an af:panelFormLayout.

Note: the Dojo documentation states the following.
"However, you need to be careful about what work you do during dojo.addOnUnload/window.onbeforeunload since this event can be triggered if the user clicks a link to download a file or a javascript: link. In these cases, the document will not be destroyed and so any destructive operations done during a dojo.addOnUnload callback may be premature."

Therefore you will have to ignore such events by using something like this.
<script type="text/javascript">
dojo.require("dojo.io.script");
var doUnload=true;
var unLoad = function() {
if(doUnload){
    dojo.io.script.get({
    url:'http://127.0.0.1:7101/myapp/adfAuthentication?logout=true',
    timeout:15000
    });
  }
doUnload=true;
  };
var init=function(){
dojo.connect(dojo.doc, "onkeypress", function(e){
   switch(e.charOrCode){
        case dojo.keys.F5: doUnload=false;
        break; 
        case dojo.keys.BACKSPACE: doUnload=false;
        }
 dojo.stopEvent(e); 
});
dojo.connect(dojo.doc, "onclick", function(e){
   doUnload=false;
});

};
dojo.addOnWindowUnload(unLoad);
dojo.addOnLoad(function (){init();});
</script>


Posted on Wednesday, March 07, 2012 by Unknown

Feb 22, 2012

This is a simple procedure I created for unlocking a user account and resetting its password to the original password. Although a very small utility, it helps with unlocking accounts that you only use after a long gap, which in my case were the SOA server and BAM server user accounts.
CREATE OR REPLACE PROCEDURE "SYS"."ACCOUNT_UNLOCKER" (p_user in varchar2)
is
  temp_password varchar2(255);
begin
  -- capture the existing (hashed) password before touching the account
  select password into temp_password from sys.user$ where upper(name) = upper(p_user);
  execute immediate 'alter user ' || p_user || ' account unlock';
  -- reset the password to its original hash value
  execute immediate 'alter user ' || p_user || ' identified by values ''' || temp_password || '''';
end;
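To use it, assuming you created the procedure as SYS, you would run something like the following (the account name is just an example):

exec account_unlocker('WEBLOGIC');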




Posted on Wednesday, February 22, 2012 by Unknown

Jan 1, 2012

The following is a technical white paper that shows you how to reset field values in ADF using the ADF JavaScript client library.

View/Download

Posted on Sunday, January 01, 2012 by Unknown