java.lang.IllegalStateException: SparkContext has been shutdown - spark-streaming

I have developed a streaming application which uses several file streams and then unions them. I use checkpointing to reload from checkpoint data. After 4 or 5 reloads from checkpoint data, my application gives me the above exception. My code is as follows:
// Factory intended for StreamingContext.getOrCreate: builds the SparkConf,
// the SparkContext, and the complete DStream graph, then returns the new
// StreamingContext. All stream definitions must live inside this function so
// the graph can be rebuilt when recovering from checkpoint data.
// NOTE(review): the "SparkContext has been shutdown" failures after several
// checkpoint recoveries suggest the recovered job graph is still bound to a
// previously stopped SparkContext — TODO confirm against the code path that
// calls this factory and stops/restarts contexts.
def creatingFunc(): StreamingContext = {
//System.setProperty("hadoop.home.dir", "C:\\hadoop")
// minRememberDuration is set extremely high so the file streams never
// "forget" old files; executor count is pinned to 2.
val conf = new SparkConf().setAppName("FileStreaming").set("spark.streaming.fileStream.minRememberDuration", "2000000h").set("spark.executor.instances","2") /*.set("SPARK_CONF_DIR","src/main/resources")*/
.registerKryoClasses(Array(classOf[org.apache.hadoop.io.LongWritable]))//.setMaster("local[7]")
// Verify that the attached Spark cluster is 1.6.0+
val sc = new SparkContext(conf)
// NOTE(review): replace(".", "").substring(0, 3) mishandles versions with
// multi-digit components (e.g. "1.10.0" -> "110", which fails the >= 160
// check even though 1.10 > 1.6) — TODO confirm acceptable for this cluster.
require(sc.version.replace(".", "").substring(0, 3).toInt >= 160, "Spark 1.6.0+ is required to run this code. Please attach it to a Spark 1.6.0+ cluster.")
// Create a StreamingContext
val ssc = new StreamingContext(sc, Seconds(batchIntervalSeconds))
// Checkpoint directory: hard-coded absolute path on MapR-FS.
ssc.checkpoint("/mapr/cellos-mapr/user/mbazarganigilani/SparkStreaming1/src/main/checkpoints")
// Drops the GGSN CSV header row (recognized by its "servedMSISDN" column).
val funcGSSNFilterHeader = (x: String) => {
!x.contains("servedMSISDN")
}
// Drops the CCN CSV header row (recognized by its "resultCode" column).
val funcCCNFilterHeader = (x: String) => {
!x.contains("resultCode")
}
// Builds the two probe streams (1 = GGSN, 2 = CCN); each is keyed by
// SIMPLE_KEY_JOINS and reduced to (key, IMSI) pairs so they can be unioned.
val unionProbeStreams=(1 to 2).map(i=>i match {
case 1 => {
// GGSN CDRs: records with exactly 25 comma-separated fields are parsed;
// anything else becomes an "Invalid GGSN CDR" placeholder record.
val ggsnArray = ssc.fileStream[LongWritable, Text, TextInputFormat]("/mapr/cellos-mapr/user/mbazarganigilani/SparkStreaming1/src/main/GGSN", filterF, false)
.map(x => x._2.toString()).filter(x => funcGSSNFilterHeader(x)).map(x => {
x.split(",")
}).map(x => x.length match {
case 25 => new SIMPLE_GGSN(x(0), x(1), x(2), x(3), x(4), x(5), x(6), x(7), x(8), x(9), x(10), x(11), x(12), x(13), x(14), new SIMPLE_GGSN_2(x(15), x(16), x(17), x(18), x(19), x(20), x(21), x(22), x(23), x(24)))
case _ => new SIMPLE_GGSN("Invalid GGSN CDR", "", "", "", "", "", "", "", "", "", "", "", "", "", "", new SIMPLE_GGSN_2("", "", "", "", "", "", "", "", "", ""))
}).map(x => (new SIMPLE_KEY_JOINS(x.IMSI, x.CAHRGING_ID), x)).map(x => (x._1, x._2.IMSI))
ggsnArray
}
case 2 => {
// CCN CDRs: records with exactly 43 comma-separated fields are parsed;
// anything else becomes an "Invalid CCN CDR" placeholder record.
val ccnArray=ssc.fileStream[LongWritable, Text, TextInputFormat]("/mapr/cellos-mapr/user/mbazarganigilani/SparkStreaming1/src/main/CCN", filterF, false)
.map(x => x._2.toString()).filter(x => funcCCNFilterHeader(x)).map(x => x.split(",")).map(x => x.length match {
case 43 => new SIMPLE_CCN(x(0), x(1), x(2), x(3), x(4), x(5), x(6), x(7), x(8), x(9), x(10), x(11), x(12), x(13), x(14), x(15), x(16), x(17),
x(18), x(19), x(20), new SIMPLE_CCN_2(x(21), x(22), x(23), x(24), x(25), x(26), x(27), x(28), x(29), x(30), x(31), x(32), x(33), x(34), x(35), x(36), x(37), x(38), x(39), x(40), x(41), x(42)))
case _ => new SIMPLE_CCN("Invalid CCN CDR", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", new SIMPLE_CCN_2("", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", ""))
}).map(x => (new SIMPLE_KEY_JOINS(x.IMSI, x.ccn_2.CHARGINGCONTEXT_16778226), x)).map(x => (x._1, x._2.IMSI))
ccnArray
}
})
// Single DStream over both probe sources.
val joined=ssc.union(unionProbeStreams)
// Checkpoint the unioned stream every 5 batch intervals.
joined.checkpoint(Duration(batchIntervalSeconds * 1000 * 5))
//joined.foreachRDD(_.count())
// Output action: count each micro-batch (this is where the
// IllegalStateException surfaces once the SparkContext is stopped).
joined.foreachRDD(y=> {
println("this count for joint is "+ y.count())
//y.foreach(x=> println(x))
})
ssc.remember(Minutes(1)) // To make sure data is not deleted by the time we query it interactively
println("Creating function called to create new StreamingContext")
newContextCreated = true
ssc
}
I get the following exception when I run my application after loading from checkpoint data several times:
java.lang.IllegalStateException: SparkContext has been shutdown
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1824)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1845)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1858)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:1929)
at org.apache.spark.rdd.RDD.count(RDD.scala:1157)
at UnionStream$$anonfun$creatingFunc$5.apply(UnionStreaming.scala:453)
at UnionStream$$anonfun$creatingFunc$5.apply(UnionStreaming.scala:451)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:661)
at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:661)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ForEachDStream.scala:50)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:50)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:50)
at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:426)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply$mcV$sp(ForEachDStream.scala:49)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:49)
at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:49)
at scala.util.Try$.apply(Try.scala:161)
at org.apache.spark.streaming.scheduler.Job.run(Job.scala:39)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply$mcV$sp(JobScheduler.scala:224)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:224)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:224)
at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler.run(JobScheduler.scala:223)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
2016-09-27 12:58:15,025 ERROR [JobScheduler] scheduler.JobScheduler: Error running job streaming job 1474943750000 ms.4
I am using Spark Streaming 1.6.1..

Related

SwiftUI background thread

In my project I'm trying to download some airport data via an API call and display it in a list.
While this is happening in my SwiftUI view, I want to display a simple circle-loading animation.
To launch the download I use a button that triggers the action and activates the animation using an @State property (isSearching = false or true).
here my view :
import SwiftUI
// Search screen: an ICAO text field plus a search button; shows the airport
// result list while idle and a CircleLoading spinner while a search runs.
// Fixes markdown-garbled tokens from the original paste: property wrappers
// were written as `#ObservedObject`/`#State` (must be `@`) and two comment
// lines used `\\` (must be `//`), neither of which compiles.
struct AirportSearch: View {
    @ObservedObject var dm : DataManager
    @State var ICAOSearsh = ""
    // Drives both the spinner and which subview is visible.
    @State var isSearching = false
    var body: some View {
        ZStack {
            backgroundGradiet.edgesIgnoringSafeArea(.all)
            if self.isSearching == false {
                LottieView(filename: "planeAnimation")
            }
            VStack {
                CustomTextField(placeholder: Text(" Search ICAO").foregroundColor(.white), text: $ICAOSearsh)
                    .padding(.horizontal , 10)
                    .frame(height: 40.0)
                    .border(Color.black.opacity(5.0), width: 1)
                // Button HERE________________________________________________________________
                Button(action: {
                    // Flip into the "searching" state, kick off the download,
                    // and flip back when the completion closure fires.
                    self.isSearching = true
                    self.dm.searchAirportOnline(icaoCode: self.ICAOSearsh)
                    {
                        debugPrint("finito search")
                        self.isSearching = false
                    }
                }) {
                    HStack {
                        Image(systemName: "magnifyingglass").foregroundColor(Color.white)
                        Text("Find your airport").foregroundColor(.white)
                    }
                }
                // button above HERE________________________________________________________
                if isSearching == false {
                    // Idle: list every airport the DataManager has found.
                    List {
                        ForEach(self.dm.airportVector) { aeroporto in
                            VStack {
                                HStack {
                                    Image(systemName: "airplane").foregroundColor(.red)
                                    Text(aeroporto.aptShortName).bold()
                                    Spacer()
                                    Text(aeroporto.aptICAO)
                                    Text("/")
                                    Text(aeroporto.aptIATA)
                                }
                                HStack {
                                    Text(aeroporto.countryCode)
                                    Spacer()
                                    Text(aeroporto.continent)
                                }.font(.system(size: 13))
                            }
                            .foregroundColor(.white)
                        }.padding(.horizontal)
                    }.onAppear {
                        // Make the List transparent so the gradient shows through.
                        UITableView.appearance().backgroundColor = UIColor.clear
                        UITableViewCell.appearance().backgroundColor = .clear
                    }
                } else {
                    // Busy: show the loading indicator instead of the list.
                    Spacer()
                    CircleLoading()
                    Spacer()
                }
            }
        }
    }
    // Blue-to-black diagonal gradient used as the screen background.
    var backgroundGradiet: LinearGradient {
        let gradient = LinearGradient(gradient: Gradient(colors: [Color.blue,Color.black]), startPoint: .topTrailing, endPoint: .bottomLeading)
        return gradient
    }
}
Unfortunately, when I press the button the animation freezes.
To solve this issue I tried to use DispatchQueue.global().async {} and DispatchQueue.main.async {} in my DataManager class, but I'm getting the warning:
Publishing changes from background threads is not allowed; make sure to publish values from the main thread (via operators like receive(on:)) on model updates```
here below my class :
import UIKit
import Combine
import SwiftUI
import CoreLocation
// Owns the list of airports displayed by the UI.
//
// Fixes applied:
//  * `#Published`/`#escaping` were markdown-garbled `@Published`/`@escaping`.
//  * The whole request was wrapped in DispatchQueue.global().async, which made
//    `self.airportVector = []` (a @Published property) mutate off the main
//    thread — the source of the "Publishing changes from background threads
//    is not allowed" warning. AF.request is already asynchronous, so the
//    global-queue hop is unnecessary; only the final append needs an explicit
//    main-queue dispatch because the response handler may run off-main.
class DataManager: ObservableObject {
    let objectWillChange = PassthroughSubject<Void,Never>()
    static let shared = DataManager()
    // Airports found so far; manually republished on every change.
    @Published var airportVector : [AirportModel] = []
    {
        didSet {
            objectWillChange.send()
        }
    }
    init() {
    }
    // MARK: - Search Online ICAO apt
    // Looks up one airport by ICAO code via the aerodatabox API and appends it
    // to airportVector. `closure` is invoked on the main queue after the
    // airport has been stored.
    func searchAirportOnline(icaoCode: String, closure : @escaping () ->()) {
        self.airportVector = [] // empty the vector to
        // set the request apiKey
        let headers : HTTPHeaders = [
            "x-rapidapi-host": "aerodatabox.p.rapidapi.com",
            "x-rapidapi-key": "18df152ef8msh5e365520639e47ep1fb24bjsn8e65c311e1f7"
        ]
        let linkRequest = "https://aerodatabox.p.rapidapi.com/airports/icao/\(icaoCode)?withTime=true&withRunways=false"
        // make request via AlamoFire
        AF.request(linkRequest, method: .get, headers: headers)
            .responseData { (responseData) in
                switch responseData.result {
                case .success(let value ) :
                    let json = JSON(value)
                    // Start from an all-empty model, then fill it field by field.
                    var airport = AirportModel(aptICAO: "", aptIATA: "", aptShortName: "", aptFullName: "", aptMunicipalName: "", locationLat: "", locationLon: "", countryName: "", countryCode: "", continent: "", timeZone: "", linkLiveATC: "", linkLinkFlightRadar: "", airportWebsite: "", currentTimeUTC: "", currentTimeLocal: "", runway: [RunwayModel(name1: RunwayModel.Runway(lengthM: "", lengthF: "", hasLighting: false, widthM: "", widthF: "", surface: "", isClosed: false, name: "", trueHdg: "", displacedThresholdM: "", displacedThresholdF: "", locationLat:0.0, locationLong: 0.0), name2: RunwayModel.Runway(lengthM: "", lengthF: "", hasLighting: false, widthM: "", widthF: "", surface: "", isClosed: false, name: "", trueHdg: "", displacedThresholdM: "", displacedThresholdF: "", locationLat:0.0, locationLong: 0.0))])
                    airport.aptICAO = json["icao"].stringValue
                    airport.aptIATA = json["iata"].stringValue
                    airport.aptShortName = json["shortName"].stringValue
                    airport.aptFullName = json["fullName"].stringValue
                    airport.aptMunicipalName = json["municipalityName"].stringValue
                    airport.locationLat = json["location"]["lat"].stringValue
                    airport.locationLon = json["location"]["lon"].stringValue
                    airport.countryName = json["country"]["name"].stringValue
                    airport.countryCode = json["country"]["code"].stringValue
                    airport.continent = json["continent"]["name"].stringValue
                    airport.timeZone = json["timeZone"].stringValue
                    airport.linkLiveATC = json["urls"]["liveAtc"].stringValue
                    airport.linkLinkFlightRadar = json["urls"]["flightRadar"].stringValue
                    airport.airportWebsite = json["urls"]["webSite"].stringValue
                    airport.currentTimeUTC = json["currentTime"]["utcTime"].stringValue
                    airport.currentTimeLocal = json["currentTime"]["localTime"].stringValue
                    // NOTE(review): runwayRequest is asynchronous, so its
                    // completion may fire after the append below; `airport` is
                    // a value type, so the appended copy may miss the runways —
                    // TODO confirm intended ordering.
                    self.runwayRequest(airportICAO: icaoCode) { (vettoreRunwayModel) in
                        airport.runway = vettoreRunwayModel
                    }
                    DispatchQueue.main.async {
                        // Publish on the main queue only.
                        self.airportVector.append(airport) // aggiungo ad airport vector l'aeroporto trovato
                        closure()
                    }
                case.failure(let error) :
                    debugPrint(error)
                    debugPrint("cazzoo")
                }
            }
    }
}
The error points at self.airportVector.append because it is a @Published var, and I don't know how to solve the issue.
thanks for the help,
Try this one
// Looks up one airport by ICAO code and appends it to airportVector,
// publishing all @Published mutations on the main queue.
// Fix: the pasted snippet's `#escaping` is markdown-garbled `@escaping`
// (the snippet does not compile otherwise); logic is unchanged.
func searchAirportOnline(icaoCode: String, closure : @escaping () ->()) {
    self.airportVector = [] // empty the vector to
    // set the request apiKey
    let headers : HTTPHeaders = [
        "x-rapidapi-host": "aerodatabox.p.rapidapi.com",
        "x-rapidapi-key": "18df152ef8msh5e365520639e47ep1fb24bjsn8e65c311e1f7"
    ]
    let linkRequest = "https://aerodatabox.p.rapidapi.com/airports/icao/\(icaoCode)?withTime=true&withRunways=false"
    // make request via AlamoFire
    AF.request(linkRequest, method: .get, headers: headers)
        .responseData { (responseData) in
            switch responseData.result {
            case .success(let value ) :
                let json = JSON(value)
                // Start from an all-empty model, then fill it field by field.
                var airport = AirportModel(aptICAO: "", aptIATA: "", aptShortName: "", aptFullName: "", aptMunicipalName: "", locationLat: "", locationLon: "", countryName: "", countryCode: "", continent: "", timeZone: "", linkLiveATC: "", linkLinkFlightRadar: "", airportWebsite: "", currentTimeUTC: "", currentTimeLocal: "", runway: [RunwayModel(name1: RunwayModel.Runway(lengthM: "", lengthF: "", hasLighting: false, widthM: "", widthF: "", surface: "", isClosed: false, name: "", trueHdg: "", displacedThresholdM: "", displacedThresholdF: "", locationLat:0.0, locationLong: 0.0), name2: RunwayModel.Runway(lengthM: "", lengthF: "", hasLighting: false, widthM: "", widthF: "", surface: "", isClosed: false, name: "", trueHdg: "", displacedThresholdM: "", displacedThresholdF: "", locationLat:0.0, locationLong: 0.0))])
                airport.aptICAO = json["icao"].stringValue
                airport.aptIATA = json["iata"].stringValue
                airport.aptShortName = json["shortName"].stringValue
                airport.aptFullName = json["fullName"].stringValue
                airport.aptMunicipalName = json["municipalityName"].stringValue
                airport.locationLat = json["location"]["lat"].stringValue
                airport.locationLon = json["location"]["lon"].stringValue
                airport.countryName = json["country"]["name"].stringValue
                airport.countryCode = json["country"]["code"].stringValue
                airport.continent = json["continent"]["name"].stringValue
                airport.timeZone = json["timeZone"].stringValue
                airport.linkLiveATC = json["urls"]["liveAtc"].stringValue
                airport.linkLinkFlightRadar = json["urls"]["flightRadar"].stringValue
                airport.airportWebsite = json["urls"]["webSite"].stringValue
                airport.currentTimeUTC = json["currentTime"]["utcTime"].stringValue
                airport.currentTimeLocal = json["currentTime"]["localTime"].stringValue
                self.runwayRequest(airportICAO: icaoCode) { (vettoreRunwayModel) in
                    DispatchQueue.main.async {
                        airport.runway = vettoreRunwayModel
                    }
                }
                DispatchQueue.main.async {
                    // Publish on the main queue only.
                    self.airportVector.append(airport) // aggiungo ad airport vector l'aeroporto trovato
                    closure()
                }
            case.failure(let error) :
                debugPrint(error)
                debugPrint("cazzoo")
            }
        }
}

is there a way to choose which object to return based on it's attribute in vuex?

I have an array of objects named Employees and am trying to get the ones who are department_id=3 and I don't want to go back to laravel and make another request so is there a way to do so with vuex?
"id": 25,
"name": "name",
"email": "name#gmail.com",
"last_name": "lastname",
"phone": "98745632",
"gender": "Male",
"nationality": "Tunisian",
"school": "ISIMM",
"experience_level": "2",
"source": "Linkedin",
"hiring_date": "2020-04-17",
"end_contract": "2020-04-18",
"position": "web developer",
"grade": "Junior",
"contract": "Cdi",
"department_id": 1,
"company_id": 1,
"archived": "0",
"img": null,
"supervisor_id": 1,
"ipAdress": null,
"last_login_at": null,
"department": {
"id": 1,
"name": "mobile"
},
here's
state :
const employee = {
state: {
Employees: [],
sysAdmins: [],
},
here's
getters :
sysAdmins: (state) =>
state.Employees.map((element) => (element.department_id = "3")),
Employees: (state) => state.Employees,
here's
mutations :
getsysAdmins(state, employees) {
state.sysAdmins = employees;
},
getEmployees(state, employees) {
state.Employees = employees;
},
here's
actions :
getEmployees(context) {
const config = {
headers: {
"x-api-key": process.env.VUE_APP_SIRH_X_API_KEY,
Authorization: localStorage.getItem("access_token"),
},
};
return new Promise((resolve, reject) => {
axios
.get("/employees/all_employees", config)
.then((response) => {
context.commit("getEmployees", response.data.data.users);
context.commit("getsysAdmins", response.data.data.users);
resolve(response);
})
.catch((error) => {
reject(error);
});
});
},
If I understand it right, you want to return the whole Employee object for those employees that work in a certain department.
You can do this by filtering your Employees array. I would write that as following:
getters: {
employees: (state) => state.Employees,
sysAdmins: (state) => state.Employees.filter((employee) => employee.department_id === 3),
// If your DB returns a string as the department_id, you'll have to check against "3"
}
If sysAdmins is always a subset of the employees, it makes more sense to always use a getter instead of placing these in a separate array in the state.
Some other notes:
- Your mutations are called get... even though they are setters; it might be best to rename them.
- In your action, you currently set the same result as employees and sysAdmins. Again, I would just set employees and always filter the sysAdmins from that array.
try this..
check the result using console.log to make it sure.
const newValue = {
...JSON.parse(localStorage.getItem("Employees")),
department.id: 3
}
localStorage.setItem("Employees", JSON.stringify(newValue))
or
// Get the user from localStorage and parse into object
let department = JSON.parse(localStorage.getItem("Employees"));
// Update an attribute on the user object
Employees.department.id = 2;
// Update the user in localStorage and stringify to string
localStorage.setItem("Employees", JSON.stringify(Employees));

Variable mirroring other ones

So I have created a simple chat bot, and I'm having trouble with one of its commands, /resetall. It is supposed to change every user's values to the values of the user "default". However, it seems that "default" is causing all values to change by 2.
"data": {
"user": {
"bob": {
"admin": "true",
"consecutiveCommands": "0",
"nickname": "",
"sentMessages": "2"
},
"default": {
"admin": "true",
"consecutiveCommands": "0",
"nickname": "",
"sentMessages": "2"
},
"me": {
"admin": "true",
"consecutiveCommands": "0",
"nickname": "",
"sentMessages": "2"
},
"total": {
"admin": "true",
"consecutiveCommands": "0",
"nickname": "",
"sentMessages": "2"
}
},
"chat": {
"commandSender": "me",
"lastImage": "",
"lastMessage": "/pong",
"lastSender": "me",
"lastTimestamp": "11:59",
"wasCommand": "true"
}
}
and my go code:
// Message represents one incoming chat event.
type Message struct {
Message string // raw message text (may be a command such as "/resetall")
From string // sender's user name
Chat string // identifier of the chat the message belongs to
Timestamp string // wall-clock time as a string, e.g. "11:59"
IsCommand bool // set to true once the message is recognized as a command
}
// addOne parses s as a base-10 integer, increments it, and returns the
// result as a string. A string that fails to parse is treated as 0 (so the
// result is "1"); the parse error is deliberately ignored.
func addOne(s string) string {
	n, _ := strconv.Atoi(s)
	n++
	return strconv.Itoa(n)
}
// messageCounter increments the string-encoded "sentMessages" counter for
// the sender and for the aggregate "total" user, then returns the map.
func messageCounter(data map[string]Chat, event *Message) map[string]Chat {
	users := data[event.Chat].Data.User
	for _, who := range []string{event.From, "total"} {
		users[who]["sentMessages"] = addOne(users[who]["sentMessages"])
	}
	return data
}
// eventRecorder stores the latest chat activity in the chat-level state map.
// Ordinary messages update the last-message fields; commands only record the
// command flag and who issued it.
func eventRecorder(data map[string]Chat, event *Message) map[string]Chat {
	chat := data[event.Chat].Data.Chat
	if event.IsCommand {
		chat["wasCommand"] = "true"
		chat["commandSender"] = event.From
	} else {
		chat["lastMessage"] = event.Message
		chat["lastSender"] = event.From
		chat["lastTimestamp"] = event.Timestamp
		chat["wasCommand"] = "false"
	}
	return data
}
//supposed to set all users data to the default user
func resetall(event *Message, data map[string]Chat) (error, map[string]Chat) {
default_user := data[event.Chat].Data.User["default"]
if data[event.Chat].Data.User[event.From]["admin"] == "true" {
for user, _ := range data[event.Chat].Data.User {
if user != "default" {
data[event.Chat].Data.User[user] = default_user
print(user + "\n")
}
}
return nil, data
}
return errors.New("don't have permission")), data
}
// Entry point: loads persisted chat state, simulates one incoming
// "/resetall" message, runs the per-message bookkeeping passes, and saves
// the state back to disk.
func main() {
// Bookkeeping passes applied to every message, in order.
processingFuncs := []func(map[string]Chat, *Message) map[string]Chat{
messageCounter,
eventRecorder,
}
data, _ := readsettings() //reads the data from a json file; NOTE(review): read error is discarded
// Hard-coded test event; IsCommand is corrected below once the text is parsed.
event := &Message{"/resetall", "me", "chat123", "11:59", false}
if strings.Split(event.Message, " ")[0] == "/resetall" {
event.IsCommand = true
// NOTE(review): resetall's error is discarded, so "success" is printed
// even when the sender lacks admin permission.
_, data = resetall(event, data)
fmt.Println("success")
}
for _, processingFunc := range processingFuncs {
processingFunc(data, event)
}
writesettings(data) //writes the data to a json file
}
So if I set everyone's message counter to 0 and run it, it sets every single user's message counter to 2 (including default). Each time I run it, the value increases by 2. Can anyone help explain why this is happening
this line to copy the user
data[event.Chat].Data.User[user] = default_user
made a reference to it, I don't know why I didn't spot it earlier. I replaced it with this code
for k, v := range data[event.Chat].Data.User["default"] {
data[event.Chat].Data.User[user][k] = v
}
and it worked perfectly.

In Alexa, how do I define slot defaults in my Intent code?

I have one intent in my lambda function. I am trying to fill 4 slots, 3 of which are required. In my tests it seems like I have to set the Assignee field to a default or something fails in my actual handler which happens after the else statement below. Here's how I'm currently defining the defaults:
if strings.ToUpper(request.DialogState) == "STARTED" {
log.Println("DialogState == STARTED")
// Pre-fill slots: update the intent object with slot values for which
// you have defaults, then return Dialog.Delegate with this updated intent
// in the updatedIntent property.
slots := make(map[string]alexa.IntentSlot)
slots["Summary"] = alexa.IntentSlot{
Name: "Summary",
Value: "",
ConfirmationStatus: "NONE",
}
slots["TicketType"] = alexa.IntentSlot{
Name: "TicketType",
Value: "",
ConfirmationStatus: "NONE",
}
slots["Project"] = alexa.IntentSlot{
Name: "Project",
Value: "",
ConfirmationStatus: "NONE",
}
slots["Assignee"] = alexa.IntentSlot{
Name: "Assignee",
Value: "tcheek",
ConfirmationStatus: "NONE",
}
i := &alexa.Intent{
Name: "OpenTicketIntent",
ConfirmationStatus: "NONE",
Slots: slots,
}
response.AddDialogDirective("Dialog.Delegate", "", "", i)
response.ShouldSessionEnd = false
log.Println("DialogState has exited STARTED")
} else if strings.ToUpper(request.DialogState) != "COMPLETED" {
log.Println("DialogState == IN PROGRESS")
// return a Dialog.Delegate directive with no updatedIntent property.
response.ShouldSessionEnd = false
response.AddDialogDirective("Dialog.Delegate", "", "", nil)
log.Println("DialogState has exited IN PROGRESS")
} else {
I have also tried setting just the Assignee field as a default, like this:
slots := make(map[string]alexa.IntentSlot)
slots["Assignee"] = alexa.IntentSlot{
Name: "Assignee",
Value: "tcheek",
ConfirmationStatus: "NONE",
}
i := &alexa.Intent{
Name: "OpenTicketIntent",
ConfirmationStatus: "NONE",
Slots: slots,
}
response.AddDialogDirective("Dialog.Delegate", "", "", i)
In this scenario I get the following lambda function response in the simulator:
{
"body": {
"version": "1.0",
"response": {
"directives": [
{
"type": "Dialog.Delegate",
"updatedIntent": {
"name": "OpenTicketIntent",
"confirmationStatus": "NONE",
"slots": {
"Assignee": {
"name": "Assignee",
"value": "tcheek",
"confirmationStatus": "NONE"
}
}
}
}
],
"shouldEndSession": false
}
}
}
The problem is that once I ask it to open a bug ticket (which maps to the intent with the "Open a {ticketType} ticket" utterance), it gives the response that "There was a problem with the requested skill's response".
Am I wrong to think that setting defaults is necessary? Am I setting defaults incorrectly?
As far as I know, you need to include all of the slots in the response. Only when the intent name and all slots match will you get the correct response.

All connections are active in Tomcat datasource pool with Spring

My Spring Boot application uses JDBCTemplate to send SQL queries to a PostgreSQL Database. It seems that each time a connection is got from the pool by the template, the connection is never released. The number of active connections (datasource.primary.active) is always increasing.
In the logs, after SQL Query using JDBCTemplate, I can see :
DEBUG o.s.j.d.DataSourceUtils - Returning JDBC Connection to DataSource
But the count of idle connection stay with the same value and the count of active connections is not decreasing. When the maximum value is reached, it becomes impossible to retrieve a connection to execute a query.
So, I think there is no return of the connection to the datasource pool, any idea please ?
Here is the datasource configuration got with Actuator :
"dataSource": {
"prefix": "spring.datasource.tomcat",
"properties": {
"connectionProperties": null,
"propagateInterruptState": false,
"validator": null,
"useDisposableConnectionFacade": true,
"defaultCatalog": null,
"validationInterval": 3000,
"jmxEnabled": true,
"ignoreExceptionOnPreLoad": false,
"logAbandoned": false,
"commitOnReturn": false,
"password": "******",
"maxIdle": 100,
"testWhileIdle": false,
"removeAbandoned": false,
"poolProperties": {
"dbProperties": {
"user": "postgres",
"password": "******"
},
"url": "jdbc:postgresql://localhost:5432/tvir",
"driverClassName": "org.postgresql.Driver",
"defaultAutoCommit": null,
"defaultReadOnly": null,
"defaultTransactionIsolation": -1,
"defaultCatalog": null,
"connectionProperties": null,
"initialSize": 10,
"maxActive": 100,
"maxIdle": 100,
"minIdle": 10,
"maxWait": 30000,
"validationQuery": "SELECT 1",
"validationQueryTimeout": -1,
"validatorClassName": null,
"validator": null,
"testOnBorrow": true,
"testOnReturn": false,
"testWhileIdle": false,
"timeBetweenEvictionRunsMillis": 5000,
"numTestsPerEvictionRun": 0,
"minEvictableIdleTimeMillis": 60000,
"accessToUnderlyingConnectionAllowed": true,
"removeAbandoned": false,
"removeAbandonedTimeout": 60,
"logAbandoned": false,
"name": "Tomcat Connection Pool[1-574817798]",
"password": "******",
"username": "postgres",
"validationInterval": 3000,
"jmxEnabled": true,
"initSQL": null,
"testOnConnect": false,
"jdbcInterceptors": null,
"fairQueue": true,
"useEquals": true,
"abandonWhenPercentageFull": 0,
"maxAge": 0,
"useLock": false,
"suspectTimeout": 0,
"dataSource": null,
"dataSourceJNDI": null,
"alternateUsernameAllowed": false,
"commitOnReturn": false,
"rollbackOnReturn": false,
"useDisposableConnectionFacade": true,
"logValidationErrors": false,
"propagateInterruptState": false,
"ignoreExceptionOnPreLoad": false,
"useStatementFacade": true
},
And the code used to query the db :
// NOTE(review): jdbcTemplate.getDataSource().getConnection() borrows a raw
// connection from the pool OUTSIDE Spring's management, and neither this
// connection nor the ResultSet is ever closed — every call through here
// leaks one active connection, which matches the ever-growing active count
// described above. Close the connection in a finally block (or use
// DataSourceUtils/JdbcTemplate callbacks) instead.
JdbcTemplate jdbcTemplate = appCtx.getBean(JdbcTemplate.class);
ResultSet columns = jdbcTemplate.getDataSource().getConnection().getMetaData().getColumns(null, null, source.getTable().toLowerCase(), null);
// Build a comma-separated select list: SUM(col) for numeric column types,
// MAX(col) for everything else, each aliased back to the column name.
String selectList = "";
while (columns.next())
{
String colName = columns.getString("COLUMN_NAME");
String colType = columns.getString("DATA_TYPE");
if(!selectList.equals("")) {
selectList += ", ";
}
// DATA_TYPE is read as a string and compared against java.sql.Types
// constants rendered as strings.
if((""+java.sql.Types.INTEGER).equalsIgnoreCase(colType) ||
(""+java.sql.Types.DOUBLE).equalsIgnoreCase(colType) ||
(""+java.sql.Types.BIGINT).equalsIgnoreCase(colType) ||
(""+java.sql.Types.FLOAT).equalsIgnoreCase(colType) ) {
selectList += "SUM(" + colName + ")";
} else {
selectList += "MAX(" + colName + ")";
}
selectList += " AS "+colName;
}
// Aggregate over a date range. NOTE(review): table and column names are
// concatenated into the SQL (only the two dates are bound parameters) —
// fine if source.* is trusted, an injection risk otherwise.
String sql = "SELECT "+selectList+" FROM "+source.getTable()+" "+
"WHERE "+source.getDateColumn()+" >= ? "+
"AND "+source.getDateColumn()+" <= ? ";
List<Map<String, Object>> results = jdbcTemplate.queryForList(sql, Date.valueOf(startDate), Date.valueOf(endDate));
Spring boot let you configure how you want your datasource to behave.
You will find a full list on the official doc.
check the following properties for your case :
spring.datasource.maxActive
spring.datasource.maxIdle
Depending on what connection pool you are using you can also tune it using spring boot properties (everything is in the doc) .

Resources