OConcurrentModificationException while adding edges - graph

We're developing a system based on OrientDB graphs (OrientDB 2.1.3). In the application we have a thin pojo->graph persistence layer that should do the work properly, but I get an OConcurrentModificationException when multiple threads update the database.
Here's an example scenario:
Create a Product vertex with an edge to Color "Blue".
Simultaneously (while the transaction for creating Product 1 is still open), another Product vertex is created that also adds an edge to Color "Blue".
An OConcurrentModificationException is thrown, since the version of the Color "Blue" vertex has been updated. Note that I'm not trying to save or modify the Color "Blue" vertex itself.
As I understand the docs at http://orientdb.com/docs/2.1/Concurrency.html#concurrency-on-adding-edges, setting -DridBag.embeddedToSbtreeBonsaiThreshold=-1 should help me avoid this problem, but it still doesn't work.
What am I missing? Is there anything else I can do to avoid this?
Update:
Stacktrace of the exception:
Error on releasing database 'infogileorientdatabasetest' in pool
com.orientechnologies.orient.core.exception.OConcurrentModificationException: Cannot UPDATE the record #40:1 because the version is not the latest. Probably you are updating an old record or it has been modified by another user (db=v34 your=v33)
at com.orientechnologies.orient.core.conflict.OVersionRecordConflictStrategy.checkVersions(OVersionRecordConflictStrategy.java:55)
at com.orientechnologies.orient.core.conflict.OVersionRecordConflictStrategy.onUpdate(OVersionRecordConflictStrategy.java:42)
at com.orientechnologies.orient.core.storage.impl.local.OAbstractPaginatedStorage.checkAndIncrementVersion(OAbstractPaginatedStorage.java:2279)
at com.orientechnologies.orient.core.storage.impl.local.OAbstractPaginatedStorage.doUpdateRecord(OAbstractPaginatedStorage.java:1911)
at com.orientechnologies.orient.core.storage.impl.local.OAbstractPaginatedStorage.commitEntry(OAbstractPaginatedStorage.java:2364)
at com.orientechnologies.orient.core.storage.impl.local.OAbstractPaginatedStorage.commit(OAbstractPaginatedStorage.java:1111)
at com.orientechnologies.orient.core.tx.OTransactionOptimistic.doCommit(OTransactionOptimistic.java:609)
at com.orientechnologies.orient.core.tx.OTransactionOptimistic.commit(OTransactionOptimistic.java:156)
at com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx.commit(ODatabaseDocumentTx.java:2582)
at com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx.commit(ODatabaseDocumentTx.java:2551)
at com.orientechnologies.orient.server.network.protocol.binary.ONetworkProtocolBinary.commit(ONetworkProtocolBinary.java:1221)
at com.orientechnologies.orient.server.network.protocol.binary.ONetworkProtocolBinary.executeRequest(ONetworkProtocolBinary.java:400)
at com.orientechnologies.orient.server.network.protocol.binary.OBinaryNetworkProtocolAbstract.execute(OBinaryNetworkProtocolAbstract.java:223)
at com.orientechnologies.common.thread.OSoftThread.run(OSoftThread.java:77)
Update 2 - test case
I have reproduced the error using this test case. I would be delighted if there's something else I've done wrong to cause the problem... :-)
Update 3: Updated test case with OGlobalConfiguration.RID_BAG_EMBEDDED_TO_SBTREEBONSAI_THRESHOLD.setValue(-1) in a static block.
package se.infogile.persistence.orientdb;
import com.orientechnologies.orient.client.remote.OServerAdmin;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.OPartitionedDatabasePool;
import com.orientechnologies.orient.core.db.OPartitionedDatabasePoolFactory;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.exception.OConcurrentModificationException;
import com.orientechnologies.orient.core.exception.OConfigurationException;
import com.orientechnologies.orient.core.exception.OStorageException;
import com.orientechnologies.orient.core.tx.OTransaction;
import com.orientechnologies.orient.enterprise.channel.binary.OResponseProcessingException;
import com.orientechnologies.orient.server.OServer;
import com.orientechnologies.orient.server.OServerMain;
import com.orientechnologies.orient.server.config.OServerConfiguration;
import com.orientechnologies.orient.server.config.OServerConfigurationLoaderXml;
import com.orientechnologies.orient.server.config.OServerNetworkListenerConfiguration;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.impls.orient.OrientGraph;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
/**
* Created by heintz on 14/10/15.
*/
public class OrientDBEdgeProblemTest {
static {
OGlobalConfiguration.RID_BAG_EMBEDDED_TO_SBTREEBONSAI_THRESHOLD.setValue(-1);
}
private static OPartitionedDatabasePoolFactory dbPoolFactory = new OPartitionedDatabasePoolFactory(100);
private static Logger logger = LoggerFactory.getLogger(OrientDBEdgeProblemTest.class);
private OServer server = null;
private static ExecutorService executorService = Executors.newFixedThreadPool(10);
private static final String dbName = "edgeproblemtest";
@Test
public void testVersionIncrementError() throws Throwable {
OrientGraph graph = getGraph(dbName);
graph.getRawGraph().setDefaultTransactionMode();
graph.createVertexType("Product");
graph.createVertexType("Color");
graph.createEdgeType("HasColor");
graph.getRawGraph().begin(OTransaction.TXTYPE.OPTIMISTIC);
// graph.begin();
Vertex v1 = graph.addVertex("Color", "name", "Blue");
graph.commit();
graph.shutdown();
char[] alphabet = new char[] {'A','B','C','D','E','F','G'};
List<Future> futures = new ArrayList<>();
for (int i = 0; i < 2; i++) {
int pos = i;
futures.add(executorService.submit(new Callable<Object>() {
@Override
public Object call() throws Exception {
OrientGraph g = getGraph(dbName);
try {
g.begin();
Vertex v2 = g.addVertex("Product", "name", "Product "+alphabet[pos]);
g.addEdge(null, v2, v1, "HasColor");
Thread.sleep(200);
g.commit();
} catch (OConcurrentModificationException ocme) {
logger.error("Exception while saving: ", ocme);
Assert.fail("OConcurrentModificationException");
} finally {
g.shutdown();
}
return null;
}
}));
}
for (Future f : futures) {
f.get();
}
executorService.shutdown();
executorService.awaitTermination(5, TimeUnit.SECONDS);
}
@AfterSuite
public void tearDown() throws Exception {
logger.info("Shutting down OrientDB");
if (server != null) {
server.shutdown();
}
}
private OrientGraph getGraph(String dbName) {
String _db = "remote:localhost:3424";
String url = _db + "/" + dbName;
ODatabaseDocumentTx db = null;
try {
OPartitionedDatabasePool pool = dbPoolFactory.get(url,
"root",
"admin");
db = pool.acquire();
} catch (OResponseProcessingException | OConfigurationException | OStorageException oce) {
try {
logger.info("creating new database named " + dbName);
System.err.println("Before DB creation");
OServerAdmin serverAdmin = new OServerAdmin(_db).connect(
"root",
"admin"
);
serverAdmin.createDatabase(dbName, "document", "plocal");
serverAdmin.close();
System.err.println("After DB creation");
} catch (IOException ex) {
logger.error("Unable to create database " + dbName, ex);
}
OPartitionedDatabasePool pool = dbPoolFactory.get(url,
"root",
"admin");
db = pool.acquire();
}
return new OrientGraph(db);
}
@BeforeSuite
public void setUpDatabase() throws Exception {
File f = new File(".");
InputStream is = GraphPersistenceServiceTest.class.getResourceAsStream("/orientdb.config");
Assert.assertNotNull(is);
logger.info("Starting OrientDB");
server = OServerMain.create();
OServerConfigurationLoaderXml loaderXml = new OServerConfigurationLoaderXml(OServerConfiguration.class, GraphPersistenceServiceTest.class.getResourceAsStream("/orientdb.config"));
OServerConfiguration oServerConfiguration = new OServerConfiguration(loaderXml);
System.setProperty("ORIENTDB_ROOT_PASSWORD", "admin");
System.setProperty("RUNMODE", "UNITTEST");
OServerNetworkListenerConfiguration networkConfig = oServerConfiguration.network.listeners.iterator().next();
networkConfig.portRange = "3424-3430";
server.setServerRootDirectory("./target/orientdb");
server.startup(oServerConfiguration);
File serverDir = new File("./target/orientdb");
if (serverDir.exists()) {
FileUtils.deleteDirectory(serverDir);
}
serverDir.mkdirs();
File dbDir = new File(serverDir, "databases");
dbDir.mkdirs();
server.activate();
OGlobalConfiguration.dumpConfiguration(System.out);
Thread.sleep(2000);
}
}

Hi, that is because when you add edges to a vertex, the vertex itself is modified to store this information. You can, however, work in a mode where the information about edges is stored in a separate object. Just set the property
-DridBag.embeddedToSbtreeBonsaiThreshold=-1 and you will get rid of this exception.
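For reference, here is a minimal sketch of the two ways this setting can be applied, based only on what already appears in the question (the JVM flag and the test case's static block); the note about also setting it on the server JVM for remote connections is an assumption, not something stated above.

import com.orientechnologies.orient.core.config.OGlobalConfiguration;

public class RidBagSettings {
    static {
        // Programmatic equivalent of -DridBag.embeddedToSbtreeBonsaiThreshold=-1,
        // mirroring the static block in the test case above. It has to run before
        // any database/graph instance is created in this JVM.
        OGlobalConfiguration.RID_BAG_EMBEDDED_TO_SBTREEBONSAI_THRESHOLD.setValue(-1);
    }

    public static void main(String[] args) {
        // Alternatively, pass the system property when starting the JVM:
        //   java -DridBag.embeddedToSbtreeBonsaiThreshold=-1 ...
        // Assumption (not confirmed here): with a remote: connection the property
        // likely needs to be in effect on the OrientDB server JVM as well, not only
        // on the client, so that edges are stored in SB-tree RidBags on both sides.
        System.out.println("ridBag threshold configured");
    }
}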

Related

Quarkus and reactive datasources - Error Multiple matching properties for name "datasource.url"

I have a problem connecting to a Postgres database using PgPool together with a JDBC ResultSet and Statement. Here is my service class.
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import javax.servlet.http.HttpSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import org.springframework.web.context.annotation.ApplicationScope;
//import io.vertx.reactivex.pgclient.PgPool;
import io.vertx.axle.pgclient.PgPool;
import ml.kalansow.domain.StudentFees;
import ml.kalansow.service.KalansowService;
@Service
@ApplicationScope
public class StudentFeesService implements KalansowService {
private static final Logger LOG = LoggerFactory.getLogger(StudentFeesService.class);
PgPool client;
// ----Constructor------------------------------------------
public StudentFeesService() {
// TODO Auto-generated constructor stub
};
// ------------------------------Method---------------------------------
@Override
public String getServiceName() {
return this.getClass().getName();
}
public void processGetFeesDetails(HttpSession session) {
String strStudentId = (String) session.getAttribute("StudentId");
StudentFees studentFees = new StudentFees();
if (strStudentId != null) {
// This is mandatory before calling the next method
studentFees.setStudentId(strStudentId);
populateFeesInfo(studentFees);
session.setAttribute("studentFees", studentFees);
} else {
LOG.error("Student Id is null");
}
}
private void populateFeesInfo(StudentFees studentFees) {
String strStudentI = studentFees.getStudentId();
io.vertx.sqlclient.impl.Connection connection = null;
Statement statement = null;
ResultSet resultSet = null;
StringBuffer sbQuery = new StringBuffer();
sbQuery.append("SELECT * FROM STUDENT_FEES WHERE STUDENT_I=");
sbQuery.append("" + strStudentI + "''");
if (strStudentI != null) {
//connection = DatabaseService.getDBConnection();
connection=(io.vertx.sqlclient.impl.Connection) client.getConnection();
try {
statement = ((Connection) connection).createStatement();
resultSet = statement.executeQuery(sbQuery.toString());
resultSet.next();
studentFees.setJanAcad(resultSet.getString("JAN_ACAD"));
studentFees.setFebAcad(resultSet.getString("FEB_ACAD"));
studentFees.setMarAcad(resultSet.getString("MAR_ACAD"));
studentFees.setAprAcad(resultSet.getString("APR_ACAD"));
studentFees.setMayAcad(resultSet.getString("MAY_ACAD"));
studentFees.setJunAcad(resultSet.getString("JUN_ACAD"));
studentFees.setJulAcad(resultSet.getString("JUL_ACAD"));
studentFees.setAugAcad(resultSet.getString("AUG_ACAD"));
studentFees.setSepAcad(resultSet.getString("SEP_ACAD"));
studentFees.setOctAcad(resultSet.getString("OCT_ACAD"));
studentFees.setNovAcad(resultSet.getString("NOV_ACAD"));
studentFees.setDecAcad(resultSet.getString("DEC_ACAD"));
} catch (SQLException e) {
LOG.error(e.getMessage());
} finally {
/*DatabaseService.closeDBConnection(statement, resultSet);
DatabaseService.realeaseDBConnection();*/
client.close();
}
} else {
LOG.error("Student id is null");
}
}
}
My application properties file contains the datasource properties:
quarkus.datasource.driver=org.postgresql.Driver
quarkus.reactive-datasource.url=vertx-reactive:postgresql://localhost:5432/test
quarkus.reactive-datasource.username=test
quarkus.reactive-datasource.password=test
And the console output is here:
ERROR [io.qua.dev.DevModeMain] Failed to start Quarkus: java.lang.IllegalArgumentException: Multiple matching properties for name "datasource.url" property was matched by both public java.util.Optional io.quarkus.agroal.runtime.DataSourceRuntimeConfig.url and public java.util.Optional io.quarkus.reactive.pg.client.runtime.DataSourceConfig.url. This is likely because you have an incompatible combination of extensions that both define the same properties (e.g. including both reactive and blocking database extensions)
at io.quarkus.deployment.configuration.matching.PatternMapBuilder.addMember(PatternMapBuilder.java:71)
at io.quarkus.deployment.configuration.matching.PatternMapBuilder.addGroup(PatternMapBuilder.java:60)
at io.quarkus.deployment.configuration.matching.PatternMapBuilder.addMember(PatternMapBuilder.java:85)
at io.quarkus.deployment.configuration.matching.PatternMapBuilder.addGroup(PatternMapBuilder.java:60)
at io.quarkus.deployment.configuration.matching.PatternMapBuilder.makePatterns(PatternMapBuilder.java:35)
at io.quarkus.deployment.configuration.BuildTimeConfigurationReader.<init>(BuildTimeConfigurationReader.java:107)
at io.quarkus.deployment.ExtensionLoader.loadStepsFrom(ExtensionLoader.java:174)
at io.quarkus.deployment.QuarkusAugmentor.run(QuarkusAugmentor.java:85)
at io.quarkus.runner.RuntimeRunner.run(RuntimeRunner.java:114)
at io.quarkus.dev.DevModeMain.doStart(DevModeMain.java:178)
at io.quarkus.dev.DevModeMain.start(DevModeMain.java:96)
You cannot use both the Agroal extension and the Reactive datasources together for the time being.
We are discussing possible ways to fix that here: https://groups.google.com/d/msg/quarkus-dev/3r0lquVsUsc/DVxX7SvQAQAJ .
But for now, your only choice is to use either one or the other.
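If you go with the blocking side, a minimal sketch of the service rewritten against a plain JDBC DataSource could look like the following. This is not taken from the question: the CDI annotations (@ApplicationScoped, @Inject), the injected javax.sql.DataSource provided by the Agroal extension, and the single-column query are assumptions used for illustration.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import javax.sql.DataSource;

@ApplicationScoped
public class StudentFeesJdbcService {

    // Assumption: with the Agroal + JDBC driver extensions, the configured
    // datasource can be injected as a plain JDBC DataSource.
    @Inject
    DataSource dataSource;

    /** Loads the JAN_ACAD column for one student; returns null if no row matches. */
    public String findJanAcad(String studentId) {
        // Parameterized query instead of string concatenation.
        String sql = "SELECT JAN_ACAD FROM STUDENT_FEES WHERE STUDENT_I = ?";
        try (Connection connection = dataSource.getConnection();
             PreparedStatement statement = connection.prepareStatement(sql)) {
            statement.setString(1, studentId);
            try (ResultSet resultSet = statement.executeQuery()) {
                return resultSet.next() ? resultSet.getString("JAN_ACAD") : null;
            }
        } catch (SQLException e) {
            throw new IllegalStateException("Query failed for student " + studentId, e);
        }
    }
}

The matching application properties would then keep only the quarkus.datasource.* entries (url, driver, username, password), with the reactive datasource extension removed from the build.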

Extent Report showing only one test case result when executing cucumber scripts in parallel using Cucable plugin

I have to generate an Extent Report covering all executed test scripts, and I am running the scripts in parallel. When I use TestNG or Selenium Grid for parallel execution, the Extent Reports are generated perfectly, covering every executed test script. But when I run scripts in parallel using the Cucable plugin, the Extent report is generated but contains only one test case even when two test cases were executed.
I am using Cucumber (Selenium), Junit Suite Runner, Cucable Plugin
I verified that the Extent Report code is thread safe, so I'm not sure why the report picks up only one test case only in the Cucable case. Someone told me that with TestNG, TestNG itself provides an additional thread-safe mechanism which internally helps include all executed test cases in the report.
ExtentTestManager.java
package com.jacksparrow.automation.extent.listeners;
import java.io.IOException;
import com.aventstack.extentreports.ExtentReports;
import com.aventstack.extentreports.ExtentTest;
import com.aventstack.extentreports.MediaEntityBuilder;
import com.aventstack.extentreports.Status;
import com.aventstack.extentreports.markuputils.ExtentColor;
import com.aventstack.extentreports.markuputils.Markup;
import com.aventstack.extentreports.markuputils.MarkupHelper;
public class ExtentTestManager {
public static ThreadLocal<ExtentTest> testReport = new ThreadLocal<ExtentTest>();
static ExtentReports extent = ExtentManager.getReporter();
public static synchronized ExtentTest getTest() {
return testReport.get();
}
public static synchronized void setTest(ExtentTest tst)
{
testReport.set(tst);
}
public static synchronized void logInfo(String message) {
testReport.get().info(message);
}
public static synchronized void logPass(String message) {
testReport.get().pass(message);
}
public static synchronized void scenarioPass() {
String passLogg = "SCENARIO PASSED";
Markup m = MarkupHelper.createLabel(passLogg, ExtentColor.GREEN);
testReport.get().log(Status.PASS, m);
}
public static synchronized void logFail(String message) {
testReport.get().fail(message);
}
public static synchronized boolean addScreenShotsOnFailure() {
ExtentManager.captureScreenshot();
try {
testReport.get().fail("<b>" + "<font color=" + "red>" + "Screenshot of failure" + "</font>" + "</b>",
MediaEntityBuilder.createScreenCaptureFromPath(ExtentManager.screenshotName).build());
} catch (IOException e) {
}
String failureLogg = "SCENARIO FAILED";
Markup m = MarkupHelper.createLabel(failureLogg, ExtentColor.RED);
testReport.get().log(Status.FAIL, m);
return true;
}
public static synchronized boolean addScreenShots() {
ExtentManager.captureScreenshot();
try {
testReport.get().info(("<b>" + "<font color=" + "green>" + "Screenshot" + "</font>" + "</b>"),
MediaEntityBuilder.createScreenCaptureFromPath(ExtentManager.screenshotName).build());
} catch (IOException e) {
e.printStackTrace();
}
return true;
}
public static synchronized ExtentTest startTest(String testName) {
return startTest(testName, "");
}
public static synchronized ExtentTest startTest(String testName, String desc) {
ExtentTest test = extent.createTest(testName, desc);
testReport.set(test);
return test;
}
}
ExtentManager.java
package com.jacksparrow.automation.extent.listeners;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.apache.commons.io.FileUtils;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.TakesScreenshot;
import com.jacksparrow.automation.utilities.DriverManager;
import com.aventstack.extentreports.AnalysisStrategy;
import com.aventstack.extentreports.ExtentReports;
import com.aventstack.extentreports.reporter.ExtentHtmlReporter;
import com.aventstack.extentreports.reporter.configuration.ChartLocation;
import com.aventstack.extentreports.reporter.configuration.Theme;
public class ExtentManager {
static ExtentReports extent;
static Date d = new Date();
static String fileName = "Extent_" + d.toString().replace(":", "_").replace(" ", "_") + ".html";
public synchronized static ExtentReports getReporter() {
if (extent == null) {
ExtentHtmlReporter htmlReporter = new ExtentHtmlReporter(System.getProperty("user.dir")+"/target/extent-report/"+fileName);
htmlReporter.loadXMLConfig(".\\src\\test\\resources\\extent-config.xml");
htmlReporter.config().setTestViewChartLocation(ChartLocation.BOTTOM);
htmlReporter.config().setChartVisibilityOnOpen(true);
htmlReporter.config().setTheme(Theme.STANDARD);
htmlReporter.config().setDocumentTitle(fileName);
htmlReporter.config().setEncoding("utf-8");
htmlReporter.config().setReportName(fileName);
//htmlReporter.setAppendExisting(true);
extent = new ExtentReports();
extent.setAnalysisStrategy(AnalysisStrategy.TEST);
extent.attachReporter(htmlReporter);
extent.setSystemInfo("Automation Analyst", "Robin Tyagi");
extent.setSystemInfo("Organization", "Way2Automation");
extent.setSystemInfo("Build no", "W2A-1234");
}
return extent;
}
public static String screenshotPath;
public static String screenshotName;
static int i=0;
public static void captureScreenshot() {
i = i + 1;
File scrFile = ((TakesScreenshot) DriverManager.getDriver()).getScreenshotAs(OutputType.FILE);
Date d = new Date();
SimpleDateFormat formatter = new SimpleDateFormat("E dd MMM HH:mm:ss z yyyy");
String strDate = formatter.format(d);
screenshotName = strDate.replace(":", "_").replace(" ", "_") + "_"+i+".jpg";
try {
FileUtils.copyFile(scrFile, new File(System.getProperty("user.dir") + "/target/extent-report/" + screenshotName));
} catch (IOException e) {
e.printStackTrace();
}
}
public static void createExtentReportDirectory() {
File file = new File(System.getProperty("user.dir") + "/target/extent-report/");
if (!file.exists()) {
if (file.mkdir()) {
} else {
}
}
}
}
Please help me understand the correct approach to generating an Extent Report that summarizes all executed test scripts when the Cucable plugin is used for parallel execution in Cucumber (Selenium).
After migrating to Cucumber 4.0, I am able to generate a single consolidated Extent report. Thank you.
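For reference, a sketch of what a Cucumber 4 JUnit runner looks like with a report adapter registered as a plugin; the feature/glue paths and the ExtentCucumberAdapter plugin string (from the extentreports-cucumber4-adapter project) are assumptions used for illustration, not details taken from the answer.

import org.junit.runner.RunWith;

import cucumber.api.CucumberOptions;
import cucumber.api.junit.Cucumber;

// Cucumber 4 style runner (cucumber.api.* packages). Cucable generates one runner
// per scenario from a template like this, so the plugin line below is what ends up
// contributing each parallel run's scenarios to the report output.
@RunWith(Cucumber.class)
@CucumberOptions(
        features = "src/test/resources/features",          // assumed feature location
        glue = {"com.jacksparrow.automation.steps"},        // assumed step-definition package
        plugin = {
                "json:target/cucumber-report/report.json",
                // Assumption: the extentreports-cucumber4-adapter plugin string; with
                // Cucumber 4 this adapter is a common way to get one consolidated report.
                "com.aventstack.extentreports.cucumber.adapter.ExtentCucumberAdapter:"
        })
public class RunCucumberTest {
}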

Alfresco delete node permanently using a web service

I am trying to delete a node in Alfresco (CE, v. 2.1) permanently (without moving it to the trash can first) using a web service.
My use case is this: create a standalone job that queries the repo for files older than 2 years and deletes them permanently using the web service. The cleanup job then moves the deleted files to content.deleted, which I can safely delete.
Below is my code
import org.alfresco.webservice.repository.RepositoryServiceSoapBindingStub;
import org.alfresco.webservice.types.CML;
import org.alfresco.webservice.types.CMLAddAspect;
import org.alfresco.webservice.types.CMLDelete;
import org.alfresco.webservice.types.Node;
import org.alfresco.webservice.types.Predicate;
import org.alfresco.webservice.types.Reference;
import org.alfresco.webservice.types.Store;
import org.alfresco.webservice.util.AuthenticationUtils;
import org.alfresco.webservice.util.Constants;
import org.alfresco.webservice.util.ContentUtils;
import org.alfresco.webservice.util.WebServiceFactory;
public class NodeDeletionService {
protected static final Store STORE = new Store(Constants.WORKSPACE_STORE,
"SpacesStore");
public static void main(String[] args) {
try {
AuthenticationUtils.startSession("admin", "admin");
RepositoryServiceSoapBindingStub repositoryService = WebServiceFactory
.getRepositoryService();
Reference reference = new Reference(STORE,
"8abb6223-11bb-11e4-a335-65e3a9c8626a", null);
Predicate predicate = new Predicate(new Reference[] { reference },
null, null);
Node[] nodes = repositoryService.get(predicate);
CMLDelete delete = new CMLDelete(predicate);
CML cml = new CML();
cml.setDelete(new CMLDelete[] { delete });
CMLAddAspect addAspect = new CMLAddAspect("temporary", null, predicate, null);
cml.setAddAspect(new CMLAddAspect[]{addAspect});
// Execute the CMLDelete statement
try {
WebServiceFactory.getRepositoryService().update(cml);
System.out.println("Deleted succesfully");
} catch (Exception e2) {
System.err.println("Can not delete the space.");
throw e2;
}
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
} finally {
AuthenticationUtils.endSession();
}
}
}
But I get an exception
{http://www.alfresco.org/ws/service/repository/1.0}RepositoryFault:<ns1:errorCode>0</ns1:errorCode><ns1:message>org.alfresco.service.cmr.dictionary.InvalidAspectException: The aspect is invalid: {}temporary</ns1:message>
{http://xml.apache.org/axis/}exceptionName:org.alfresco.repo.webservice.repository.RepositoryFault
I tried different aspects like temporary, cm:temporary, sys:temporary - but all in vain. Any idea?

When is the SQLiteOpenHelper onCreate method called?

I tried to create an SQLite database and do some stuff with it, but I found that my onCreate method is not even invoked!!
I am logging a message to LogCat at the beginning of the onCreate method.
My assumption is that the (super) constructor will invoke the onCreate method. Is that right?
My Code:
import android.database.sqlite.SQLiteOpenHelper;
import android.database.sqlite.SQLiteDatabase;
import android.content.Context;
import android.database.Cursor;
import android.content.ContentValues;
import android.util.Log;
public class DatabaseHandler extends SQLiteOpenHelper {
// Static Constants
/*** Database details ***/
// Database version
private static final int DATABASE_VERSION = 1;
// Database name
private static final String DATABASE_NAME = "database_name";
/*** Database Tables ***/
/** Events **/
// Event table
private static final String TABLE_EVENT = "event";
// Event table columns
private static final String COLUMN_EVENT_EID = "_eid";
private static final String COLUMN_EVENT_CREATION_DATE = "creation_date";
private static final String COLUMN_EVENT_TITLE = "title";
private static final String COLUMN_EVENT_ICON = "icon";
public DatabaseHandler(Context context) {
super(context, DATABASE_NAME, null, DATABASE_VERSION);
}
@Override
public void onCreate(SQLiteDatabase db) {
Log.e("MyApp", "onCreate invoked");
// Tables creation queries
String CREATE_EVENT_TABLE = "create table " + TABLE_EVENT + "(" + COLUMN_EVENT_EID + " integer primary key, "
+ COLUMN_EVENT_CREATION_DATE + " text, "
+ COLUMN_EVENT_TITLE + " text, "
+ COLUMN_EVENT_ICON + " text)";
// Creating tables
db.execSQL(CREATE_EVENT_TABLE);
}
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
Log.e("MyApp", "onUpgrade invoked");
db.execSQL("DROP TABLE IF EXISTS " + TABLE_EVENT);
}
}
MainActivity Code:
import android.os.Bundle;
import android.app.Activity;
import android.view.Menu;
public class MainActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
DatabaseHandler db = new DatabaseHandler(this);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
}
The documentation says:
The database is not actually created or opened until one of getWritableDatabase() or getReadableDatabase() is called.
Let me clarify the logic flow. A lazy-initialization concept is used here.
The (super) constructor of DatabaseHandler will not invoke the onCreate method. Calling the DatabaseHandler constructor only initializes the context, the database name, the factory that creates the database, the database version, and the database error handler.
getWritableDatabase() > getDatabaseLocked() > SQLiteDatabase.create()
OR
getReadableDatabase() > getDatabaseLocked() > SQLiteDatabase.create()
Answer: When getReadableDatabase() or getWritableDatabase() is called, it calls getDatabaseLocked(), and it is the onCreate(db) call inside getDatabaseLocked() that gets executed when the database is created for the first time.
Explanation:
The SQLiteDatabase.create() method above is responsible for creating the SQLiteDatabase on disk.
But the process is lazy initialization (meaning it doesn't make everything ready up front; it creates those objects at runtime only when you need them, and for this it uses a lot of if..else statements).
The full body of getDatabaseLocked() is below. [You can search for the onCreate() method inside the body of getDatabaseLocked().]
private SQLiteDatabase getDatabaseLocked(boolean writable) {
if (mDatabase != null) {
if (!mDatabase.isOpen()) {
// Darn! The user closed the database by calling mDatabase.close().
mDatabase = null;
} else if (!writable || !mDatabase.isReadOnly()) {
// The database is already open for business.
return mDatabase;
}
}
if (mIsInitializing) {
throw new IllegalStateException("getDatabase called recursively");
}
SQLiteDatabase db = mDatabase;
try {
mIsInitializing = true;
if (db != null) {
if (writable && db.isReadOnly()) {
db.reopenReadWrite();
}
} else if (mName == null) {
db = SQLiteDatabase.create(null);
} else {
try {
if (DEBUG_STRICT_READONLY && !writable) {
final String path = mContext.getDatabasePath(mName).getPath();
db = SQLiteDatabase.openDatabase(path, mFactory,
SQLiteDatabase.OPEN_READONLY, mErrorHandler);
} else {
db = mContext.openOrCreateDatabase(mName, mEnableWriteAheadLogging ?
Context.MODE_ENABLE_WRITE_AHEAD_LOGGING : 0,
mFactory, mErrorHandler);
}
} catch (SQLiteException ex) {
if (writable) {
throw ex;
}
Log.e(TAG, "Couldn't open " + mName
+ " for writing (will try read-only):", ex);
final String path = mContext.getDatabasePath(mName).getPath();
db = SQLiteDatabase.openDatabase(path, mFactory,
SQLiteDatabase.OPEN_READONLY, mErrorHandler);
}
}
onConfigure(db);
final int version = db.getVersion();
if (version != mNewVersion) {
if (db.isReadOnly()) {
throw new SQLiteException("Can't upgrade read-only database from version " +
db.getVersion() + " to " + mNewVersion + ": " + mName);
}
db.beginTransaction();
try {
if (version == 0) {
onCreate(db);
} else {
if (version > mNewVersion) {
onDowngrade(db, version, mNewVersion);
} else {
onUpgrade(db, version, mNewVersion);
}
}
db.setVersion(mNewVersion);
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
}
onOpen(db);
if (db.isReadOnly()) {
Log.w(TAG, "Opened " + mName + " in read-only mode");
}
mDatabase = db;
return db;
} finally {
mIsInitializing = false;
if (db != null && db != mDatabase) {
db.close();
}
}
}
Please note that inside the body of the getDatabaseLocked() method there are many if..else cases. These cases determine your current environment (configuration) and, based on it, call the appropriate methods to initialize/configure whatever is needed.
Also note: all the callback methods of your DatabaseHandler (the class that extends SQLiteOpenHelper) are called inside the getDatabaseLocked() body.
Source code SQLiteOpenHelper.java:
https://android.googlesource.com/platform/frameworks/base/+/refs/heads/master/core/java/android/database/sqlite/SQLiteOpenHelper.java
Source code SQLiteDatabase.java:
https://android.googlesource.com/platform/frameworks/base/+/master/core/java/android/database/sqlite/SQLiteDatabase.java
Sample to follow: https://github.com/uddhavgautam/SQLiteBasicSample
You are right, the (super) constructor will invoke the onCreate method, BUT only if the actual database does not exist.
From http://developer.android.com/reference/android/database/sqlite/SQLiteOpenHelper.html#onCreate%28android.database.sqlite.SQLiteDatabase%29
A helper class to manage database creation and version management.
You create a subclass implementing onCreate(SQLiteDatabase),
onUpgrade(SQLiteDatabase, int, int) and optionally
onOpen(SQLiteDatabase), and this class takes care of opening the
database if it exists, creating it if it does not, and upgrading it as
necessary.
As the official documentation says, "getWritableDatabase () Create and/or open a database that will be used for reading and writing. The first time this is called, the database will be opened and onCreate(SQLiteDatabase), onUpgrade(SQLiteDatabase, int, int) and/or onOpen(SQLiteDatabase) will be called."
Once opened successfully, the database is cached, so you can call this method every time you need to write to the database. (Make sure to call close() when you no longer need the database.) Errors such as bad permissions or a full disk may cause this method to fail, but future attempts may succeed if the problem is fixed.
http://developer.android.com/reference/android/database/sqlite/SQLiteOpenHelper.html#getWritableDatabase()
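Tying this back to the question's MainActivity, here is a minimal sketch of what actually triggers onCreate, assuming the DatabaseHandler from the question is on the classpath; the COUNT query is only illustrative.

import android.app.Activity;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Bundle;
import android.util.Log;

public class MainActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Constructing the helper does NOT create the database or call onCreate.
        DatabaseHandler helper = new DatabaseHandler(this);

        // The first getWritableDatabase()/getReadableDatabase() call opens (or creates)
        // the database; DatabaseHandler.onCreate runs here, and only when the database
        // file did not exist yet (the version == 0 branch in getDatabaseLocked()).
        SQLiteDatabase db = helper.getWritableDatabase();

        // Query the "event" table created in DatabaseHandler.onCreate.
        Cursor cursor = db.rawQuery("SELECT COUNT(*) FROM event", null);
        if (cursor.moveToFirst()) {
            Log.d("MyApp", "rows in event: " + cursor.getLong(0));
        }
        cursor.close();
        db.close();
    }
}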

Component details are not getting logged in my database when unpublishing a page using a Tridion Deployer extension

I am trying to add an entry for unpublished components in my custom storage extension. We know that we don't have any base class in Tridion for ComponentUndeploy as we have "ComponentDeploy" for deploy, so I am trying to use the ComponentPresentationUndeploy class to track the components which are getting unpublished. Below is sample code showing how I am trying to track them.
package com.tridion.custom.extensions;
import com.tridion.broker.StorageException;
import com.tridion.configuration.Configuration;
import com.tridion.configuration.ConfigurationException;
import com.tridion.deployer.DeploymentHandler;
import com.tridion.deployer.ProcessingException;
import com.tridion.deployer.Processor;
import com.tridion.deployer.modules.ComponentPresentationUndeploy;
import com.tridion.storage.ComponentMeta;
import com.tridion.storage.StorageManagerFactory;
import com.tridion.storage.StorageTypeMapping;
import com.tridion.storage.dao.ItemDAO;
import com.tridion.storage.dao.ItemTypeSelector;
import com.tridion.storage.dao.PublishAction;
import com.tridion.storage.dao.PublishActionDAO;
import com.tridion.storage.mapper.MapperFactory;
import com.tridion.transport.transportpackage.ComponentPresentationKey;
import com.tridion.transport.transportpackage.ProcessorInstructions;
import com.tridion.transport.transportpackage.TransportPackage;
import com.tridion.util.TCDURI;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
import java.util.Iterator;
public class SearchComponentUndeployer extends ComponentPresentationUndeploy {
private static Logger log = LoggerFactory
.getLogger(SearchComponentUndeployer.class);
public SearchComponentUndeployer(Configuration paramConfiguration,
Processor paramProcessor) throws ConfigurationException {
super(paramConfiguration, paramProcessor);
}
@SuppressWarnings("rawtypes")
public void process(TransportPackage paramTransportPackage) throws ProcessingException
{
ProcessorInstructions localProcessorInstructions = paramTransportPackage.getProcessorInstructions();
try
{
Iterator localIterator = localProcessorInstructions.getArguments();
while (localIterator.hasNext())
{
Object localObject = localIterator.next();
if (localObject instanceof ComponentPresentationKey)
{
ComponentPresentationKey localComponentPresentationKey = (ComponentPresentationKey) localObject;
long[] arrayOfLong = new long[2];
arrayOfLong[0] = localComponentPresentationKey.getComponentKey().getId().getItemId();
arrayOfLong[1] = localComponentPresentationKey.getTemplateKey().getId().getItemId();
int PubID = localComponentPresentationKey.getComponentKey().getId().getPublicationId();
String tcmID = Integer.toString(localComponentPresentationKey.getComponentKey().getId().getItemId());
log.info("SearchComponentUndeployer -PubID" + PubID);
log.info("SearchComponentUndeployer -tcmID" + tcmID);
ItemDAO itemDAO = ((ItemDAO) StorageManagerFactory.getDAO(PubID, StorageTypeMapping.COMPONENT_META));
log.info("SearchComponentUndeployer -itemDAO"+ itemDAO.getStorageId());
ComponentMeta compObject = (ComponentMeta) MapperFactory.mapItemMetaInstance(itemDAO.findByPrimaryKey(PubID, localComponentPresentationKey.getComponentKey().getId().getItemId(),ItemTypeSelector.COMPONENT));
log.info("SearchComponentUndeployer -compObject"+ compObject.getTitle());
String formatTCMID = String.format("tcm:%d-%s-64", PubID,tcmID);
log.info("SearchComponentUndeployer - formatTCMID -"+ formatTCMID);
String strIgnorePubIds = "232,481";
String strPubId = Integer.toString(PubID);
Date lastPublishedDate = compObject.getLastPublishDate();
String schemaID = Integer.toString(compObject.getSchemaId());
if (!strIgnorePubIds.contains(strPubId))
{
PublishAction publishAction = new PublishAction();
publishAction.setAction("DEL");
publishAction.setTcmUri(formatTCMID);
publishAction.setItemType(16);
publishAction.setPublicationID(PubID);
publishAction.setLastPublishedDate(lastPublishedDate);
publishAction.setSchemaID(schemaID);
PublishActionDAO publishActionDAO = (PublishActionDAO) StorageManagerFactory.getDefaultDAO("PublishAction");
log.debug("SearchComponentUndeployer Going to Store bean -" + publishAction.toString());
publishAction = publishActionDAO.store(publishAction);
log.debug("SearchComponentUndeployer Stored bean -" + publishAction);
}
DeploymentHandler.undeploy(new TCDURI(PubID, 73014444080L, arrayOfLong));
}
}
}
catch (StorageException e)
{
log.error("Could not undeploy component presentation", e);
}
}
}
Any idea why I am not getting any entry for the components in my database?
Edit: Added sample code from my PageUndeploy implementation:
Object argument = iterator.next();
if (argument instanceof PageKey)
{
PageKey pageKey = (PageKey) argument;
TCDURI pageMetaURI = new TCDURI(pageKey.getId() .getPublicationId(), 1168231104576L, pageKey.getId().getItemId());
PageMeta pageMeta = this.pageMetaHome.findByPrimaryKey(pageMetaURI.getPublicationId(),(int) pageMetaURI.getItemId());
if (pageMeta == null)
{
DeploymentHandler.undeploy(pageMetaURI);
}
else
{
//Here I need to loop for componentpresentation and get component object
}
}
You can try this; I have just taken the class names from your input:
List<ComponentPresentationMeta> lstCompObjects= pageMeta.getComponentPresentationMetas();
if(lstCompObjects != null && !lstCompObjects.isEmpty())
{
for(ComponentPresentationMeta compMeta : lstCompObjects)
{
String compID = Integer.toString(compMeta.getComponentId());
ItemDAO itemDAO = ((ItemDAO) StorageManagerFactory.getDAO(compMeta.getPublicationId(), StorageTypeMapping.COMPONENT_META));
ComponentMeta compObject = (ComponentMeta) MapperFactory.mapItemMetaInstance(itemDAO.findByPrimaryKey(compMeta.getPublicationId(), compMeta.getComponentId(),ItemTypeSelector.COMPONENT));
PublishAction compPublishAction = new PublishAction();
compPublishAction.setAction("DEL");
compPublishAction.setTcmUri(compID);
compPublishAction.setItemType(16);
compPublishAction.setPublicationID(compMeta.getPublicationId());
compPublishAction.setLastPublishedDate(compObject.getLastPublicationDate());
compPublishAction.setSchemaID(Integer.toString(compObject.getSchemaId()));
compPublishAction = publishActionDAO.store(compPublishAction);
}
}
