I have read a lot about tuning Hibernate for saving a large number of new objects, but I must have missed something, because insert performance is really poor ...
Here is my configuration.
Hibernate configuration:
@Configuration
@EnableTransactionManagement
@ComponentScans(value = { @ComponentScan("com.xxxxx") })
public class HibernateConfiguration
{
@Bean
public LocalSessionFactoryBean sessionFactory() {
LocalSessionFactoryBean sessionFactory = new LocalSessionFactoryBean();
sessionFactory.setDataSource(dataSource());
sessionFactory.setPackagesToScan(new String[] { "com.xxxx" });
sessionFactory.setHibernateProperties(hibernateProperties());
return sessionFactory;
}
@Bean
public DataSource dataSource() {
BasicDataSource dataSource = new BasicDataSource();
dataSource.setUrl(XXXX);
dataSource.setUsername(XXXX);
dataSource.setPassword(XXXX);
dataSource.setTestOnBorrow(true);
dataSource.setValidationQuery("SELECT 1");
dataSource.setInitialSize(3);
dataSource.setMaxActive(10);
return dataSource;
}
private Properties hibernateProperties() {
Properties properties = new Properties();
properties.put(Environment.DIALECT, "org.hibernate.dialect.PostgreSQL95Dialect");
properties.put(Environment.SHOW_SQL, false);
properties.put(Environment.FORMAT_SQL, false);
properties.put("hibernate.default-lazy", true);
properties.put(Environment.USE_NEW_ID_GENERATOR_MAPPINGS, true);
properties.put(Environment.HBM2DDL_AUTO, "none");
properties.put(Environment.STATEMENT_BATCH_SIZE, 50);
properties.put(Environment.STATEMENT_FETCH_SIZE, 400);
properties.put(Environment.ORDER_INSERTS, true);
properties.put(Environment.ORDER_UPDATES, true);
properties.put(Environment.BATCH_VERSIONED_DATA, true);
properties.put(Environment.GENERATE_STATISTICS, true);
properties.put(Environment.HQL_BULK_ID_STRATEGY, InlineIdsSubSelectValueListBulkIdStrategy.class);
return properties;
}
@Bean
public HibernateTransactionManager transactionManager()
{
HibernateTransactionManager txManager = new HibernateTransactionManager();
txManager.setSessionFactory(sessionFactory().getObject());
return txManager;
}
@Bean
public PersistenceExceptionTranslationPostProcessor exceptionTranslation()
{
return new PersistenceExceptionTranslationPostProcessor();
}
}
For my test I created a simple table without any relationships.
Entity:
@Entity
@Table(name="performance_test")
@NamedQuery(name="PerformanceTest.findAll", query="SELECT t FROM PerformanceTest t")
public class PerformanceTest
{
@Id
@GenericGenerator(
name = "PERFORMANCE_TEST_ID_GENERATOR",
strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator",
parameters = {
@Parameter(name = "sequence_name", value = "performance_test_id_seq"),
@Parameter(name = "optimizer", value = "pooled-lo"),
@Parameter(name = "increment_size", value = "1")
}
)
@GeneratedValue(strategy=GenerationType.SEQUENCE, generator="PERFORMANCE_TEST_ID_GENERATOR")
private Long id;
@Column(name="first_name")
private String firstName;
@Column(name="last_name")
private String lastName;
@Column(name="salary")
private Integer salary;
public PerformanceTest() {}
public Long getId()
{
return id;
}
public void setId(Long id)
{
this.id = id;
}
public String getFirstName()
{
return firstName;
}
public void setFirstName(String firstName)
{
this.firstName = firstName;
}
public String getLastName()
{
return lastName;
}
public void setLastName(String lastName)
{
this.lastName = lastName;
}
public Integer getSalary()
{
return salary;
}
public void setSalary(Integer salary)
{
this.salary = salary;
}
}
DAO implementation (I tried both methods below, without any significant improvement):
@Override
public void saveBulkElement(Set<T> listOfElement, Integer bulkSize)
{
if(listOfElement == null || listOfElement.size() == 0)
return;
Session session = sessionFactory.openSession();
Transaction tx = session.beginTransaction();
session.setJdbcBatchSize(batchSize);
try
{
int flushIndex = 0;
Iterator<T> ite = listOfElement.iterator();
while (ite.hasNext())
{
T element = (T) ite.next();
session.persist(element);
flushIndex++;
int size = bulkSize != null ? bulkSize:batchSize;
if((flushIndex % size == 0 && flushIndex > 0) || !ite.hasNext())
{
session.flush();
session.clear();
}
}
tx.commit();
}
catch (HibernateException e)
{
if (tx != null)
tx.rollback();
}
finally
{
session.close();
}
}
@Override
public void saveStatelessBulkElement(Set<T> listOfElement)
{
if(listOfElement == null || listOfElement.size() == 0)
return;
StatelessSession session = sessionFactory.openStatelessSession();
Transaction tx = session.beginTransaction();
session.setJdbcBatchSize(listOfElement.size() < statelessBatchSize ? listOfElement.size():statelessBatchSize);
try
{
Iterator<T> ite = listOfElement.iterator();
while (ite.hasNext())
{
T element = (T) ite.next();
session.insert(element);
}
tx.commit();
}
catch (HibernateException e)
{
if (tx != null)
tx.rollback();
}
finally
{
session.close();
}
}
My test was really simple: insert 100 new elements. I configured Hibernate to print session statistics.
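The test driver was essentially the following (a rough sketch; the class name BulkInsertTest and the GenericDao type are placeholders for illustration, not my exact code):
import java.util.HashSet;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
// Rough sketch of the test driver; class and bean names are placeholders.
@Component
public class BulkInsertTest
{
    @Autowired
    private GenericDao<PerformanceTest> performanceTestDao; // DAO exposing saveBulkElement / saveStatelessBulkElement
    public void run()
    {
        Set<PerformanceTest> rows = new HashSet<>();
        for (int i = 0; i < 100; i++)
        {
            PerformanceTest p = new PerformanceTest();
            p.setFirstName("first-" + i);
            p.setLastName("last-" + i);
            p.setSalary(1000 + i);
            rows.add(p);
        }
        // 100 entities, flush/clear every 50 -> the two flushes reported in the session metrics below
        performanceTestDao.saveBulkElement(rows, 50);
    }
}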
Here is what I got:
[StatisticalLoggingSessionEventListener] - Session Metrics {
137291307 nanoseconds spent acquiring 1 JDBC connections;
0 nanoseconds spent releasing 0 JDBC connections;
12909270 nanoseconds spent preparing 100 JDBC statements;
13660416454 nanoseconds spent executing 100 JDBC statements;
0 nanoseconds spent executing 0 JDBC batches;
0 nanoseconds spent performing 0 L2C puts;
0 nanoseconds spent performing 0 L2C hits;
0 nanoseconds spent performing 0 L2C misses;
32506326 nanoseconds spent executing 2 flushes (flushing a total of 100 entities and 0 collections);
0 nanoseconds spent executing 0 partial-flushes (flushing a total of 0 entities and 0 collections)
}
[StatisticalLoggingSessionEventListener] - Session Metrics {
141927634 nanoseconds spent acquiring 1 JDBC connections;
0 nanoseconds spent releasing 0 JDBC connections;
0 nanoseconds spent preparing 0 JDBC statements;
0 nanoseconds spent executing 0 JDBC statements;
0 nanoseconds spent executing 0 JDBC batches;
0 nanoseconds spent performing 0 L2C puts;
0 nanoseconds spent performing 0 L2C hits;
0 nanoseconds spent performing 0 L2C misses;
0 nanoseconds spent executing 0 flushes (flushing a total of 0 entities and 0 collections);
0 nanoseconds spent executing 0 partial-flushes (flushing a total of 0 entities and 0 collections)
}
Just 100 elements, and it took 14 seconds!!!
Did I miss something in the Hibernate configuration?
I UPDATED my entity with a proper generator, and the problem is still there.
===================================================
FIRST UPDATE:
I checked the database sequence generator:
Sequence "recntrek.performance_test_id_seq"
Column | Type | Value
---------------+---------+-------------------------
sequence_name | name | performance_test_id_seq
last_value | bigint | 293551
start_value | bigint | 1
increment_by | bigint | 1
max_value | bigint | 9223372036854775807
min_value | bigint | 1
cache_value | bigint | 1
log_cnt | bigint | 32
is_cycled | boolean | f
is_called | boolean | t
I changed both increment_by and cache_value to 50 (the matching id mapping is sketched below) ...
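Here is roughly what the aligned id mapping in PerformanceTest looks like after that change (a sketch: as I understand it, with the pooled-lo optimizer increment_size should match the sequence's increment_by, so Hibernate can hand out a block of 50 ids per sequence call):
// Sketch of the id mapping aligned with the database sequence (increment_by = 50).
// With pooled-lo, increment_size should equal the sequence increment so that
// one database round trip yields a block of 50 identifiers.
@Id
@GenericGenerator(
    name = "PERFORMANCE_TEST_ID_GENERATOR",
    strategy = "org.hibernate.id.enhanced.SequenceStyleGenerator",
    parameters = {
        @Parameter(name = "sequence_name", value = "performance_test_id_seq"),
        @Parameter(name = "optimizer", value = "pooled-lo"),
        @Parameter(name = "increment_size", value = "50")
    }
)
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "PERFORMANCE_TEST_ID_GENERATOR")
private Long id;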
I re-ran my test, and this time it took 1.4 seconds.
I ran another test with 10,000 new elements, and it took ~30 seconds.
That is a big improvement, but judging by this page from Vlad Mihalcea, I am still a long way from good performance.