How to get entity relationship with annotations using testing class? - spring

I have created a simple Spring Boot application with two entities, Company.java and User.java. These two have a @OneToMany relationship. I have also created a test file that generates a TypeScript file printing those two entities' attributes. Here is my test case.
#Inject
RepositoryRestMvcConfiguration configuration;
#Test
public void getEndPoints() {
configuration.resourceMappings().forEach(c -> {
String className = c.getDomainType().getName();
try {
Class<?> entityClass = Class.forName(className);
Field[] fields = entityClass.getDeclaredFields();
File tsClassDir = new File("data/tsClass");
File tsClass = new File(tsClassDir, entityClass.getSimpleName() + ".ts");
if (!tsClass.getParentFile().exists()) {
tsClass.getParentFile().mkdirs();
}
tsClass.createNewFile();
String code = "export interface " + entityClass.getSimpleName() + "{\n";
for (Field field : fields) {
try {
NotNull notNullAnnotation = field.getDeclaredAnnotation(NotNull.class);
Class<?> filedClass = Class.forName(field.getType().getName());
if (notNullAnnotation == null){
code += "\t" + field.getName() + "?: " + filedClass.getSimpleName().trim() + ";" + "\n";
}else{
code += "\t" + field.getName() + ": " + filedClass.getSimpleName().trim() + ";" + "\n";
}
} catch (Exception e) {
// TODO: handle exception
}
// System.err.println(field.getName());
}
code += "}";
Files.write(tsClass.toPath(), code.getBytes());
System.err.println(code);
} catch (Exception e) {
// TODO: handle exception
}
});
}
After test run I got the result given below.
export interface User{
userName: String;
password: String;
email: String;
company?: Company;
}
export interface Company{
name: String;
email: String;
users?: Set;
}
But I need the TypeScript file to reflect that Company and User have a @OneToMany relationship. How do I do that?

Related

How to delete a large amount of data one by one from a table with their relations using the transactional annotation

I have a large amount of data that I want to purge from the database, there are about 6 tables of which 3 have a many to many relationship with cascadeType. All the others are log and history tables independent of the 3 others
I want to purge this data one by one, and if any record causes an error while being deleted, I need to undo only the current record, report it in the console, and keep deleting the others.
I am trying to use the transactional annotation with Spring Boot, but all purging stops if an error occurs.
How should I handle this kind of requirement?
here is what i did :
// Purges a batch of cards and their dependent history/log rows inside ONE transaction.
// NOTE(review): #Transactional (scrape-mangled @Transactional) on a private method is
// ignored by Spring's proxy-based AOP — the TransactionTemplate below is what actually
// drives the transaction here; confirm the annotation can simply be removed.
#Transactional
private void purgeCards(List<CardEntity> cardsTobePurge) {
// Cards that survive the purge but point (via nextCard) at cards being deleted.
List<Long> nextCardsNumberToUpdate = getNextCardsWhichWillNotBePurge(cardsTobePurge);
TransactionTemplate lTransTemplate = new TransactionTemplate(transactionManager);
lTransTemplate.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRED);
// Single transaction for the whole list: one failure rolls back every deletion.
lTransTemplate.execute(new TransactionCallback<Object>() {
#Override
public Object doInTransaction(TransactionStatus status) {
cardsTobePurge.forEach(cardTobePurge -> {
Long nextCardNumberOfCurrent = cardTobePurge.getNextCard();
// Detach the successor card before deleting the current one.
if (nextCardsNumberToUpdate.contains(nextCardNumberOfCurrent)) {
CardEntity cardToUnlik = cardRepository.findByCardNumber(nextCardNumberOfCurrent);
unLink(cardToUnlik);
}
log.info(BATCH_TITLE + " Removing card Number : " + cardTobePurge.getCardNumber() + " with Id : "
+ cardTobePurge.getId());
// Delete dependent rows first to satisfy foreign-key constraints.
List<CardHistoryEntity> historyEntitiesOfThisCard = cardHistoryRepository.findByCard(cardTobePurge);
List<LogCreationCardEntity> logCreationEntitiesForThisCard = logCreationCardRepository
.findByCardNumber(cardTobePurge.getCardNumber());
List<LogCustomerMergeEntity> logCustomerMergeEntitiesForThisCard = logCustomerMergeRepository
.findByCard(cardTobePurge);
cardHistoryRepository.deleteAll(historyEntitiesOfThisCard);
logCreationCardRepository.deleteAll(logCreationEntitiesForThisCard);
logCustomerMergeRepository.deleteAll(logCustomerMergeEntitiesForThisCard);
cardRepository.delete(cardTobePurge);
});
return Boolean.TRUE;
}
});
}
As a solution to my question:
I worked with TransactionTemplate to be able to manage transactions manually
so if an exception is raised a rollback will only be applied for the current iteration and will continue to process other cards
// Reworked driver: each card is purged in its OWN transaction (see purgeCard), so a
// failure rolls back only that card and the loop continues with the rest.
private void purgeCards(List<CardEntity> cardsTobePurge) {
int[] counter = { 0 }; //to simulate the exception
// Cards that survive the purge but reference (via nextCard) cards being deleted.
List<Long> nextCardsNumberToUpdate = findNextCardsWhichWillNotBePurge(cardsTobePurge);
cardsTobePurge.forEach(cardTobePurge -> {
Long nextCardNumberOfCurrent = cardTobePurge.getNextCard();
CardEntity cardToUnlik = null;
counter[0]++; //to simulate the exception
// Resolve the successor card to detach before deletion, if any.
if (nextCardsNumberToUpdate.contains(nextCardNumberOfCurrent)) {
cardToUnlik = cardRepository.findByCardNumber(nextCardNumberOfCurrent);
}
purgeCard(cardTobePurge, nextCardsNumberToUpdate, cardToUnlik, counter);
});
}
// Purges ONE card (plus its history/log rows) in its own transaction. On any
// exception the transaction is marked rollback-only, the error is logged, and the
// caller's loop proceeds with the next card — partial failure does not stop the batch.
private void purgeCard(#NonNull CardEntity cardToPurge, List<Long> nextCardsNumberToUpdate, CardEntity cardToUnlik,
int[] counter) {
TransactionTemplate lTransTemplate = new TransactionTemplate(transactionManager);
lTransTemplate.setPropagationBehavior(TransactionTemplate.PROPAGATION_REQUIRED);
lTransTemplate.execute(new TransactionCallbackWithoutResult() {
#Override
public void doInTransactionWithoutResult(TransactionStatus status) {
try {
// Detach the successor card first so the delete below cannot violate the link.
if (cardToUnlik != null)
unLink(cardToUnlik);
log.info(BATCH_TITLE + " Removing card Number : " + cardToPurge.getCardNumber() + " with Id : "
+ cardToPurge.getId());
// Dependent rows are removed before the card itself (FK order).
List<CardHistoryEntity> historyEntitiesOfThisCard = cardHistoryRepository.findByCard(cardToPurge);
List<LogCreationCardEntity> logCreationEntitiesForThisCard = logCreationCardRepository
.findByCardNumber(cardToPurge.getCardNumber());
List<LogCustomerMergeEntity> logCustomerMergeEntitiesForThisCard = logCustomerMergeRepository
.findByCard(cardToPurge);
cardHistoryRepository.deleteAll(historyEntitiesOfThisCard);
logCreationCardRepository.deleteAll(logCreationEntitiesForThisCard);
logCustomerMergeRepository.deleteAll(logCustomerMergeEntitiesForThisCard);
cardRepository.delete(cardToPurge);
if (counter[0] == 2)//to simulate the exception
throw new Exception();//to simulate the exception
} catch (Exception e) {
// Roll back ONLY this card's transaction; log and let the batch continue.
status.setRollbackOnly();
if (cardToPurge != null)
log.error(BATCH_TITLE + " Problem with card Number : " + cardToPurge.getCardNumber()
+ " with Id : " + cardToPurge.getId(), e);
else
log.error(BATCH_TITLE + "Card entity is null", e);
}
}
});
}

Duplication with Chunks in Spring Batch

I have a huge file, I need to read it and dump it into DB. Any invalid records(invalid length, duplicate keys, etc), if present need to be written into a Error Report. Due to the huge size of the file we tried using the chunk-size(commit-interval) as 1000/5000/10000. In the process I found that the data was being processed redundantly due to the usage of chunks and thus my Error Report is incorrect, it not only has the actual invalid records from the input-file but also the duplicates from the chunks.
Code snippet:
#Bean
public Step readAndWriteStudentInfo() {
return stepBuilderFactory.get("readAndWriteStudentInfo")
.<Student, Student>chunk(5000).reader(studentFileReader()).faultTolerant()
.skipPolicy(skipper)..listener(listener).processor(new ItemProcessor<Student, Student>() {
#Override
public Student process(Student Student) throws Exception {
if(processedRecords.contains(Student)){
return null;
}else {
processedRecords.add(Student);
return Student;
}
}
}).writer(studentDBWriter()).build();
}
#Bean
public ItemReader<Student> studentFileReader() {
FlatFileItemReader<Student> reader = new FlatFileItemReader<>();
reader.setResource(new FileSystemResource(studentInfoFileName));
reader.setLineMapper(new DefaultLineMapper<Student>() {
{
setLineTokenizer(new FixedLengthTokenizer() {
{
setNames(classProperties50);
setColumns(range50);
}
});
setFieldSetMapper(new BeanWrapperFieldSetMapper<Student>() {
{
setTargetType(Student.class);
}
});
}
});
reader.setSaveState(false);
reader.setLinesToSkip(1);
reader.setRecordSeparatorPolicy(new TrailerSkipper());
return reader;
}
// JDBC batch writer: executes insertQuery once per item, binding named parameters
// from Student bean properties.
// NOTE(review): "datSource" looks like a typo for "dataSource" — confirm the actual
// field name before renaming, since it may be declared elsewhere in this class.
#Bean
public ItemWriter<Student> studentDBWriter() {
JdbcBatchItemWriter<Student> writer = new JdbcBatchItemWriter<>();
writer.setSql(insertQuery);
writer.setDataSource(datSource);
// Maps :propertyName placeholders in insertQuery to Student getters.
writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<Student>());
return writer;
}
I've tried with various chunk sizes, 10, 100, 1000, 5000. The accuracy of my error report deteriorates with the increase in chunk size. Writing to Error Report is happening from my implementation of Skip Policy, kindly do let me know if that code is required too to help me out.
How do I ensure that my writer picks up unique set of records in each chunk?
Skipper Implementation:
// Skip policy: decides per-throwable whether Spring Batch skips the record.
// Known parse/SQL/batch-update failures are written to the reconciliation (error)
// report and skipped; FileNotFoundException and anything unrecognized abort the step.
// NOTE(review): the method's closing brace is missing in this excerpt (scrape truncation).
#Override
public boolean shouldSkip(Throwable t, int skipCount) throws SkipLimitExceededException {
String exception = t.getClass().getSimpleName();
// A missing input file is fatal — never skip it.
if (t instanceof FileNotFoundException) {
return false;
}
switch (exception) {
case "FlatFileParseException":
FlatFileParseException ffpe = (FlatFileParseException) t;
String errorMessage = "Line no = " + ffpe.getLineNumber() + " " + ffpe.getMessage() + " Record is ["
+ ffpe.getInput() + "].\n";
writeToRecon(errorMessage);
return true;
case "SQLException":
SQLException sE = (SQLException) t;
String sqlErrorMessage = sE.getErrorCode() + " Record is [" + sE.getCause() + "].\n";
writeToRecon(sqlErrorMessage);
return true;
case "BatchUpdateException":
BatchUpdateException batchUpdateException = (BatchUpdateException) t;
String btchUpdtExceptionMsg = batchUpdateException.getMessage() + " " + batchUpdateException.getCause();
writeToRecon(btchUpdtExceptionMsg);
return true;
default:
// Unknown failure type: do not skip, let the step fail.
return false;
}

Writing a precise pointcut expression

I am using Spring AOP for logging wherein I want to log input/output of all methods present in package. I have written following pointcut for target package.
// Pointcut over ALL join points whose code lies within com.mypackage.model.*.
// NOTE(review): within(...) matches join points lexically inside the package —
// including work done inside method bodies; use execution(* com.mypackage.model.*.*(..))
// to advise only the methods' executions themselves.
#Pointcut("within(com.mypackage.model.*)")
public void allmethods(){};
My logging method is as below.
#Before("allmethods()")
public void LoggingAdviceBefore(JoinPoint joinPoint)
{
StringBuffer logMessage = new StringBuffer();
if(joinPoint != null && joinPoint.getTarget()!=null && joinPoint.getTarget().getClass()!=null)
{
logMessage.append(joinPoint.getTarget().getClass().getName());
logMessage.append(".");
logMessage.append(joinPoint.getSignature().getName());
logMessage.append("(");
// append args
Object[] args = joinPoint.getArgs();
for (int i = 0; i < args.length; i++) {
logMessage.append(args[i]).append(",");
}
if (args.length > 0) {
logMessage.deleteCharAt(logMessage.length() - 1);
}
logMessage.append(")");
log.info(logMessage.toString());
}
}
The code is working fine.
My problem is, even if I do some simple operations like, populating an array list within my code, even that information is getting logged. I don't want such information to be logged.
I want to log inputs only for the methods that I had written in the classes present in target package & not for the code written inside those methods. How do I achieve this?
You can use the below code which I had written months back to understand SNMP framework implementation, it prints i/o of all the methods in package and subpackage, you can remove irrelevant classes and modify according to your needs, if required.
#Aspect
public class Snmp4JProfiler {
private static final Logger LOG = LoggerFactory.getLogger(Snmp4JProfiler.class);
#Pointcut("execution (* org.snmp4j.Snmp.*(..))")
public void allSnmpServices() {
}
#Pointcut("execution (* org.snmp4j.security.U*.*(..))")
public void allUSMServices() {
}
#Around("allUSMServices() || allSnmpServices()")
public Object executionTimeOfService(ProceedingJoinPoint pjp) throws Throwable {
MethodSignature methodSignature = (MethodSignature) pjp.getSignature();
String className = pjp.getSignature().getDeclaringTypeName();
final String methodName = methodSignature.getName();
String methodArgs = "";
for (Object obj : pjp.getArgs()) {
if (obj == null) {
methodArgs += "no args or null,";
} else if (obj instanceof UsmTimeEntry) {
UsmTimeEntry timeEntry = (UsmTimeEntry) obj;
methodArgs += obj.toString() + "[" + timeEntry.getEngineBoots() + "," + timeEntry.getLatestReceivedTime() + ","
+ timeEntry.getTimeDiff() + "," + timeEntry.getEngineID() + "]";
} else if (obj instanceof Object[]) {
methodArgs += obj.toString() + " " + Arrays.toString((Object[]) obj);
} else {
methodArgs += obj;
}
}
LOG.info("Start of method#" + methodName + " #class#" + className + " #args#" + methodArgs);
try {
Object output = pjp.proceed();
String rtValues = "";
if (output == null) {
rtValues += "no args or null,";
} else if (output instanceof UsmTimeEntry) {
UsmTimeEntry timeEntry = (UsmTimeEntry) output;
rtValues += output.toString() + "[" + timeEntry.getEngineBoots() + "," + timeEntry.getLatestReceivedTime() + ","
+ timeEntry.getTimeDiff() + "," + timeEntry.getEngineID() + "]";
} else if (output instanceof Object[]) {
rtValues += output.toString() + " " + Arrays.toString((Object[]) output);
} else {
rtValues += output;
}
LOG.info("End of method#" + methodName + " #class#" + className + " #return#" + rtValues);
return output;
} catch (Exception ex) {
LOG.info("End of method#" + methodName + " #class#" + className + " #error#" + ex.getMessage());
throw ex;
}
}
}

BsonClassMapSerializer already registered for AbstractClassSerializer

I'm using the Mongo c# driver 2.0 and am running into BsonSerializer registration issues when registering AbstractClassSerializers for my Id value objects.
MongoDB.Bson.BsonSerializationException: There is already a serializer registered for type HistoricalEventId.
When I peek into the BsonSerializer I'm seeing that a BsonClassMapSerializer is already registered for my type.
I'm assuming that a BsonClassMapSerializer is being created for my entity types and it's also creating a BsonClassMapSerializer for the Id field as well. Has anyone run into this before? The Bson serializer code is shown below if that helps.
Sorry if the formatting is wrong, c# doesn't seem to be showing up well.
HistoricalEventIdBsonSerializer
/// <summary>
/// BSON serializer mapping HistoricalEventId value objects to/from ObjectId.
/// </summary>
public class HistoricalEventIdBsonSerializer : ToObjectIdBsonSerializer<HistoricalEventId>
{
// NOTE(review): the TryParse result is ignored — on failure parsedObj is returned
// as default (likely null); confirm that is the intended behavior.
public override HistoricalEventId CreateObjectFromObjectId(ObjectId serializedObj)
{
HistoricalEventId parsedObj;
HistoricalEventId.TryParse(serializedObj, out parsedObj);
return parsedObj;
}
}
ToObjectIdBsonSerializer
/// <summary>
/// Base serializer for value-object ID types backed by a Mongo ObjectId.
/// Deserialization accepts ObjectId, String, Null and Undefined BSON values;
/// serialization requires T to implement IConvertible&lt;ObjectId&gt;.
/// </summary>
public abstract class ToObjectIdBsonSerializer<T> : AbstractClassSerializer<T> where T : class
{
private static readonly Type _convertibleType = typeof(IConvertible<ObjectId>);
/// <summary>Builds the typed ID object from the raw ObjectId read from BSON.</summary>
public abstract T CreateObjectFromObjectId(ObjectId serializedObj);
public override T Deserialize(BsonDeserializationContext context, BsonDeserializationArgs args)
{
var bsonType = context.Reader.GetCurrentBsonType();
ObjectId value;
switch (bsonType)
{
// Null/Undefined are consumed and mapped to ObjectId.Empty.
case BsonType.Undefined:
value = ObjectId.Empty;
context.Reader.ReadUndefined();
break;
case BsonType.Null:
value = ObjectId.Empty;
context.Reader.ReadNull();
break;
case BsonType.ObjectId:
value = context.Reader.ReadObjectId();
break;
case BsonType.String:
// Accept string-encoded ObjectIds as well.
value = new ObjectId(context.Reader.ReadString());
break;
default:
throw new NotSupportedException("Unable to create the type " +
args.NominalType.Name + " from the bson type " + bsonType + ".");
}
return this.CreateObjectFromObjectId(value);
}
public override void Serialize(BsonSerializationContext context, BsonSerializationArgs args, T value)
{
if (value == null)
{
// Null values are written as ObjectId.Empty (round-trips via the Null case above).
context.Writer.WriteObjectId(ObjectId.Empty);
}
else
{
if (!_convertibleType.IsAssignableFrom(args.NominalType))
{
throw new NotSupportedException("The type " + args.NominalType.Name +
" must implement the " + _convertibleType.Name + " interface.");
}
var typedObj = (IConvertible<ObjectId>)value;
context.Writer.WriteObjectId(typedObj.ToValueType());
}
}
}
IConvertible
/// <summary>
/// Contract for ID value objects that can expose their underlying raw value
/// (e.g. an ObjectId) for serialization.
/// </summary>
public interface IConvertible<out T>
{
/// <summary>Returns the underlying raw value of this object.</summary>
T ToValueType();
}
My assumption must have been correct because I just fixed this by doing the BsonSerializer registration before creating the MongoClient and getting the database. Hopefully this will help someone else.

Entity Framework Code First Concurrency Control

Hi there,
I’m using the EF code first to persist the data by following the sequence: filter => remove => add, run the attached sample(two threads run conrrrently), sometime I noticed there was no existing record return after the filtering, sometime there are two records after the filtering, what I thought/expected is - every time the filter should have one existing record return. Also, there are some exceptions raised up during saving the change.
As I known, EF by default uses the Read Committed isolation level to execute the transaction, I think that means during the filtering, the shared lock is put on the resource, but why I can observe that there is not existing record or two existing records after the filtering, the remove and add operation together should be an atomic operation, right? If I’m right, there should be only and just one record after filtering.
Is there anything I missed? How to handle this case correctly?
Please help.
Another question:
Using the LastUpdated column as the concurrency token, how do I handle the following cases correctly:
1. If the entity in the database is newer than the entity in the context, start another thread to archive the entity to the history database.
2. If the entity in the database is old than the entity in the context, retry the saving to overwrite the entity in the database.
Am I right to use the code below to handle the case:
/// <summary>
/// Saves pending changes, resolving delete concurrency conflicts on Customer:
/// if the database row is newer than ours, it is archived to the history database;
/// otherwise the entity is reloaded, re-marked Deleted, and the save is retried.
/// </summary>
internal void SaveChangesEx()
{
    bool saveFailed;
    do
    {
        saveFailed = false;
        try
        {
            base.SaveChanges();
        }
        catch (DbUpdateConcurrencyException ex)
        {
            ex.Entries.ToList().ForEach(entry =>
            {
                if (entry.State == System.Data.EntityState.Deleted)
                {
                    var current = base.Set<Customer>().FirstOrDefault();
                    // Fixed: GetDatabaseValues() returns DbPropertyValues, not Customer —
                    // materialize it via ToObject(). Also unified the variable name
                    // (original declared rfqInDb but used customerInDb).
                    Customer customerInDb = (Customer)entry.GetDatabaseValues().ToObject();
                    // Newer row already in the DB: archive it instead of retrying.
                    if (current.LastUpdated < customerInDb.LastUpdated)
                    {
                        using (var archiver = new CustomerArchiveDBContext())
                        {
                            archiver.RFQS.Add(current);
                            archiver.SaveChangesEx();
                        }
                    }
                    else
                    {
                        // Refresh the context, re-mark Deleted, and retry the save.
                        entry.Reload();
                        entry.State = System.Data.EntityState.Deleted;
                        saveFailed = true; // fixed: was never set, so the do/while never retried
                    }
                }
            });
        }
    } while (saveFailed);
}
Script: ========
-- Customers table used by the concurrency repro below.
-- NOTE(review): no PRIMARY KEY constraint is declared here, although the EF mapping
-- uses FirstName as the key — confirm the table matches the model.
CREATE TABLE [dbo].[Customers](
[FirstName] [nvarchar](20) NOT NULL,
[LastName] [nvarchar](60) NULL,
[Company] [nvarchar](250) NULL,
[Telephone] [nvarchar](20) NULL,
[LastUpdated] [datetime] NULL  -- intended concurrency token (mapping currently commented out)
) ON [PRIMARY]
Code ========
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Transactions;
using System.Data.Entity.Validation;
using System.Threading.Tasks;
using System.Threading;
using System.Data.Entity.ModelConfiguration;
using System.Data.Entity;
using System.Data.Entity.ModelConfiguration.Conventions;
namespace EFOptimisticConcurrency
{
/// <summary>
/// Base class for persisted entities; Id is ignored for Customer in its
/// entity configuration (FirstName is the key there).
/// </summary>
public abstract class Entity
{
public int Id { get; set; }
}
/// <summary>
/// Customer entity; FirstName is mapped as the primary key and LastUpdated is
/// the intended optimistic-concurrency token.
/// </summary>
public class Customer
: Entity
{
/// <summary>
/// Get or set the first name; mapped as the primary key.
/// </summary>
public string FirstName { get; set; }
/// <summary>
/// Get or set the surname of this customer
/// </summary>
public string LastName { get; set; }
/// <summary>
/// Get or set the telephone
/// </summary>
public string Telephone { get; set; }
/// <summary>
/// Get or set the company name
/// </summary>
public string Company { get; set; }
/// <summary>
/// Get or set the last-modified timestamp (intended concurrency token).
/// </summary>
public DateTime LastUpdated { get; set; }
}
/// <summary>
/// Fluent mapping for Customer: FirstName is the key, the inherited Id is ignored,
/// and column lengths mirror the Customers table script.
/// </summary>
class CustomerEntityConfiguration
: EntityTypeConfiguration<Customer>
{
/// <summary>
/// Create a new instance of customer entity configuration
/// </summary>
public CustomerEntityConfiguration()
{
//configure keys and properties
this.HasKey(c => c.FirstName);
this.Ignore(c => c.Id);
this.Property(c => c.FirstName)
.HasMaxLength(20)
.IsRequired();
this.Property(c => c.LastName)
.HasMaxLength(60)
.IsRequired();
this.Property(c => c.Company)
.HasMaxLength(250);
// NOTE(review): the concurrency token is disabled — while commented out, EF performs
// no optimistic concurrency check on LastUpdated, which matters for the question above.
//this.Property(c => c.LastUpdated).IsConcurrencyToken();
this.Property(c => c.Telephone)
.HasMaxLength(20);
this.ToTable("Customers");
}
}
/// <summary>
/// EF Code First context exposing Customers; cascade-delete convention removed
/// and the Customer fluent configuration registered.
/// </summary>
public class CustomerContext : DbContext
{
    public DbSet<Customer> Customers { get; set; }

    protected override void OnModelCreating(DbModelBuilder modelBuilder)
    {
        modelBuilder.Conventions.Remove<OneToManyCascadeDeleteConvention>();
        // fixed: removed stray empty statement (";;") after this call
        modelBuilder.Configurations.Add(new CustomerEntityConfiguration());
    }
}
/// <summary>
/// Repro harness: two endless tasks concurrently run filter => remove => add on the
/// same Customers table to demonstrate that Read Committed does not make the
/// sequence atomic (the filter can see 0 or 2 rows). showStopper pauses a task at
/// Console.ReadLine() once an anomaly is observed.
/// </summary>
public class Program
{
// volatile flag shared between the two tasks; non-zero pauses the loop for inspection.
public static volatile int showStopper = 0;
static void Main(string[] args)
{
var color = Console.ForegroundColor;
EntityFrameworkProfiler.Initialize();
// Task 1: insert "MyCity" customers, deleting any older row with the same phone first.
Task.Factory.StartNew(() =>
{
while (true)
{
Customer customer = new Customer();
customer.FirstName = "FirstName";
customer.LastName = "Last " + new Random().Next(0, 10000).ToString();
customer.Telephone = "686868";
customer.LastUpdated = DateTime.Now;
customer.Company = "MyCity";
if (showStopper == 2)
{
Console.ReadLine();
showStopper = 0;
}
try
{
Console.WriteLine("Start the Store => " + customer.LastName + " , " + customer.LastUpdated.ToString());
{
int i = 0;
using (var customerConext = new CustomerContext())
{
Console.WriteLine("Start the filter 1 => " + customer.Telephone + " , " + customer.LastUpdated.ToString());
// Filter: expect exactly ONE older row; i counts what was actually seen.
var matched = customerConext.Customers.Where(c => c.Telephone == "686868" && c.LastUpdated < customer.LastUpdated);
foreach (var hit in matched)
{
i++;
customerConext.Customers.Remove(hit);
}
// Anomaly detection: 0 or 2 matches means another thread interleaved.
if (i == 2)
{
Console.WriteLine("1 - 2 exist, has the problem now");
showStopper = 2;
}
else if (i == 0)
{
Console.WriteLine("1 - 0 exist, has the problem now");
showStopper = 2;
}
Console.WriteLine("Start Adding 1 => " + customer.LastName + " , " + customer.LastUpdated.ToString());
try
{
customerConext.Customers.Add(customer);
customerConext.SaveChanges();
Console.WriteLine("SaveChanges 1 => " + customer.LastName + " , " + customer.LastUpdated.ToString());
}
catch (Exception ex)
{
Console.WriteLine("Exception 1 : " + ex.Message + " => " + customer.LastName + " , " + customer.LastUpdated);
if (ex.InnerException != null)
{
Console.WriteLine("Inner Exception 2 : " + ex.InnerException.Message + " => " + customer.LastName + " , " + customer.LastUpdated);
}
}
}
}
}
catch (Exception ex)
{
Console.WriteLine("Exception 1 " + ex.Message);
if (ex.InnerException != null)
{
Console.WriteLine(ex.InnerException.Message);
}
showStopper = 2;
}
}
});
// Let task 1 run alone for 10s before starting the competing task.
Thread.Sleep(10000);
// Task 2: same filter => remove => add sequence, inserting "MyCity2" customers.
Task.Factory.StartNew(() =>
{
while (true)
{
Console.ForegroundColor = color;
try
{
Customer customer = new Customer();
customer.FirstName = "FirstName";
customer.LastName = "Last " + new Random().Next(0, 10000).ToString();
customer.Telephone = "686868";
customer.Company = "MyCity2";
customer.LastUpdated = DateTime.Now;
if (showStopper == 3)
{
Console.ReadLine();
showStopper = 0;
}
Console.WriteLine("Start the store 2 => " + customer.LastName + " , " + customer.LastUpdated.ToString());
{
int i = 0;
using (var customerConext = new CustomerContext())
{
Console.WriteLine("Start the filter 2 => " + customer.Telephone + " , " + customer.LastUpdated.ToString());
var matched = customerConext.Customers.Where(c => c.Telephone == "686868" && c.LastUpdated < customer.LastUpdated);
foreach (var hit in matched)
{
i++;
customerConext.Customers.Remove(hit);
}
Console.WriteLine("Start Adding 2 => " + customer.LastName + " , " + customer.LastUpdated.ToString());
try
{
customerConext.Customers.Add(customer);
customerConext.SaveChanges();
Console.WriteLine("SaveChanges 2 => " + customer.LastName + " , " + customer.LastUpdated.ToString());
}
catch (Exception ex)
{
Console.WriteLine("Exception 2 : " + ex.Message + " => " + customer.LastName + " , " + customer.LastUpdated);
if (ex.InnerException != null)
{
Console.WriteLine("Inner Exception 2 : " + ex.InnerException.Message + " => " + customer.LastName + " , " + customer.LastUpdated);
}
showStopper = 2;
}
}
// Same anomaly check as task 1 (here performed AFTER the add/save).
if (i == 2)
{
Console.WriteLine("1 - 2 exist, has the problem now");
showStopper = 2;
}
else if (i == 0)
{
Console.WriteLine("1 - 0 exist, has the problem now");
showStopper = 2;
}
}
}
catch (Exception ex)
{
Console.WriteLine("Exception 2 " + ex.Message);
if (ex.InnerException != null)
{
Console.WriteLine(ex.InnerException.Message);
}
}
}
});
Console.WriteLine("PRESS ANY KEY TO END");
Console.ReadLine();
}
}
}

Resources