Prestashop Link to database cannot be established: SQLSTATE[HY000] [1226] - prestashop-1.7

I get an ERROR 500 when trying to access the BackOffice of my PrestaShop; I can only get in with debug mode on.
My hosting provider told me I should close connections in the script, but I don't know what to insert or modify, or in which script.
[PrestaShopException]
Link to database cannot be established: SQLSTATE[HY000] [1226] User 'u488328531_pcFsK' has exceeded the 'max_user_connections' resource (current value: 15)
at line 127 in file classes/db/DbPDO.php
122. public function connect()
123. {
124. try {
125. $this->link = $this->getPDO($this->server, $this->user, $this->password, $this->database, 5);
126. } catch (PDOException $e) {
127. throw new PrestaShopException('Link to database cannot be established: ' . $e->getMessage());
128. }
129.
130. $this->link->exec('SET SESSION sql_mode = \'\'');
131.
132. return $this->link;
DbPDOCore->connect - [line 330 - classes/db/Db.php]
DbCore->__construct - [line 241 - classes/db/Db.php] - [4 Arguments]
DbCore::getInstance - [line 47 - config/alias.php]
pSQL - [line 340 - classes/shop/Shop.php] - [1 Arguments]
ShopCore::initialize - [line 119 - config/config.inc.php]
require - [line 40 - admin/index.php] - [1 Arguments]

Same issue here. Five years of work down the drain with PrestaShop.
[PrestaShopException]
Link to database cannot be established: SQLSTATE[08004] [1040] Too many connections
at line 127 in file classes/db/DbPDO.php
public function connect()
{
try {
$this->link = $this->getPDO($this->server, $this->user, $this->password, $this->database, 5);
} catch (PDOException $e) {
127. throw new PrestaShopException('Link to database cannot be established: ' . $e->getMessage());
}
$this->link->exec('SET SESSION sql_mode = \'\'');
return $this->link;
DbPDOCore->connect - [line 330 - classes/db/Db.php]
DbCore->__construct - [line 241 - classes/db/Db.php] - [4 Arguments]
DbCore::getInstance - [line 47 - config/alias.php]
pSQL - [line 340 - classes/shop/Shop.php] - [1 Arguments]
ShopCore::initialize - [line 119 - config/config.inc.php]
require - [line 27 - index.php] - [1 Arguments]


Kusto Ingest - KustoServiceError 'BadRequest_SyntaxError'

I have the following code for ingesting data into Azure Data Explorer using Python in Databricks:
df=pd.DataFrame({"StringCol": ["123ABC", 'B123', 'C123','D123'],"NumberCol": [1,2,3,4],"DecimalCol": [1,2.2,3.3,4.4],"DateCol": ['1/1/20','2/2/20','3/3/30','4/4/20']})
ingestion_props = IngestionProperties(database=db, table='TestTable_DeleteMe')
connWrite.ingest_from_dataframe(df, ingestion_properties=ingestion_props)
This gives me the error:
BadRequest_SyntaxError', 'message': 'Request is invalid and cannot be executed
Earlier in my code I created a table using the same data types as this dummy pandas dataframe. Now I'm trying to load the data into the table. Full stack trace:
KustoServiceError Traceback (most recent call last)
<command-3953651275234016> in <module>
1 df=pd.DataFrame({"StringCol": ["123ABC", 'B123', 'C123','D123'],"NumberCol": [1,2,3,4],"DecimalCol": [1,2.2,3.3,4.4],"DateCol": ['1/1/20','2/2/20','3/3/30','4/4/20']})
2 ingestion_props = IngestionProperties(database=db, table='TestTable_DeleteMe')
----> 3 connWrite.ingest_from_dataframe(df, ingestion_properties=ingestion_props)
4
5 #adx_loadIntoTable(connWrite,db,df,'TestTable_DeleteMe')
/databricks/python/lib/python3.7/site-packages/azure/kusto/ingest/ingest_client.py in ingest_from_dataframe(self, df, ingestion_properties)
52 ingestion_properties.format = DataFormat.CSV
53
---> 54 self.ingest_from_file(temp_file_path, ingestion_properties)
55
56 os.unlink(temp_file_path)
/databricks/python/lib/python3.7/site-packages/azure/kusto/ingest/ingest_client.py in ingest_from_file(self, file_descriptor, ingestion_properties)
64 :param azure.kusto.ingest.IngestionProperties ingestion_properties: Ingestion properties.
65 """
---> 66 containers = self._resource_manager.get_containers()
67
68 if isinstance(file_descriptor, FileDescriptor):
/databricks/python/lib/python3.7/site-packages/azure/kusto/ingest/_resource_manager.py in get_containers(self)
121
122 def get_containers(self) -> List[_ResourceUri]:
--> 123 self._refresh_ingest_client_resources()
124 return self._ingest_client_resources.containers
125
/databricks/python/lib/python3.7/site-packages/azure/kusto/ingest/_resource_manager.py in _refresh_ingest_client_resources(self)
79 or not self._ingest_client_resources.is_applicable()
80 ):
---> 81 self._ingest_client_resources = self._get_ingest_client_resources_from_service()
82 self._ingest_client_resources_last_update = datetime.utcnow()
83
/databricks/python/lib/python3.7/site-packages/azure/kusto/ingest/_resource_manager.py in _get_ingest_client_resources_from_service(self)
86
87 def _get_ingest_client_resources_from_service(self):
---> 88 table = self._kusto_client.execute("NetDefaultDB", ".get ingestion resources").primary_results[0]
89
90 secured_ready_for_aggregation_queues = self._get_resource_by_name(table, "SecuredReadyForAggregationQueue")
/databricks/python/lib/python3.7/site-packages/azure/kusto/data/client.py in execute(self, database, query, properties)
553 query = query.strip()
554 if query.startswith("."):
--> 555 return self.execute_mgmt(database, query, properties)
556 return self.execute_query(database, query, properties)
557
/databricks/python/lib/python3.7/site-packages/azure/kusto/data/client.py in execute_mgmt(self, database, query, properties)
578 :rtype: azure.kusto.data.response.KustoResponseDataSet
579 """
--> 580 return self._execute(self._mgmt_endpoint, database, query, None, KustoClient._mgmt_default_timeout, properties)
581
582 def execute_streaming_ingest(
/databricks/python/lib/python3.7/site-packages/azure/kusto/data/client.py in _execute(self, endpoint, database, query, payload, timeout, properties)
654 )
655
--> 656 raise KustoServiceError([response.json()], response)
KustoServiceError: (KustoServiceError(...), [{'error': {'code': 'BadRequest_SyntaxError', 'message': 'Request is invalid and cannot be executed.', '#type': 'Kusto.Data.Exceptions.SyntaxException', '#message': "Syntax error: Query could not be parsed: . Query: '.get ingestion resources'", '#context': {'timestamp': '2020-06-27T21:44:48.0697658Z', 'serviceAlias': 'USCPIRSTASADE01', 'machineName': 'KEngine000000', 'processName': 'Kusto.WinSvc.Svc', 'processId': 7124, 'threadId': 7240, 'appDomainName': 'Kusto.WinSvc.Svc.exe', 'clientRequestId': 'KPC.execute;0c2173bf-ea69-4253-bbaf-0203f3aa298c', 'activityId': 'cf41c806-8e15-458e-b388-386613f63952', 'subActivityId': 'df366667-ca8d-487b-a281-723f696a8f68', 'activityType': 'DN.FE.ExecuteControlCommand', 'parentActivityId': 'f8cd0bb8-04e9-48cf-8a84-8b16e1e24197', 'activityStack': '(Activity stack: CRID=KPC.execute;0c2173bf-ea69-4253-bbaf-0203f3aa298c ARID=cf41c806-8e15-458e-b388-386613f63952 > DN.Admin.Client.ExecuteControlCommand/7271d9ec-2adf-4714-b19e-69495ad80d65 > P.WCF.Service.ExecuteControlCommandInternal..IAdminClientServiceCommunicationContract/f8cd0bb8-04e9-48cf-8a84-8b16e1e24197 > DN.FE.ExecuteControlCommand/df366667-ca8d-487b-a281-723f696a8f68)'}, '#permanent': True}}])
It is likely that your connection uses the engine endpoint instead of the data management (ingestion) endpoint. Can you check that the connection to the cluster starts with "ingest-"? See an example here:
client = KustoIngestClient("https://ingest-{cluster_name}.kusto.windows.net")
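In other words, keep two clients: a KustoClient built on the engine endpoint for queries and control commands, and a KustoIngestClient built on the "ingest-" endpoint for ingestion. A minimal sketch of the ingest side, assuming AAD application-key authentication and placeholder cluster/credential values (import paths can differ slightly between azure-kusto package versions):
from azure.kusto.data import KustoConnectionStringBuilder  # older releases: azure.kusto.data.request
from azure.kusto.ingest import KustoIngestClient, IngestionProperties

cluster = "mycluster.westus"  # placeholder cluster name
ingest_uri = "https://ingest-{}.kusto.windows.net".format(cluster)  # note the "ingest-" prefix

# Placeholder AAD application credentials; substitute your own auth method.
kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
    ingest_uri, "app-id", "app-key", "tenant-id")
connWrite = KustoIngestClient(kcsb)

# db name and df are the database and pandas DataFrame from the question.
ingestion_props = IngestionProperties(database="MyDatabase", table="TestTable_DeleteMe")
connWrite.ingest_from_dataframe(df, ingestion_properties=ingestion_props)
The query client used to create the table can keep the plain https://{cluster_name}.kusto.windows.net URI; only the ingestion client needs the "ingest-" form.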

How should I format my dataset to avoid this? "Input is not valid. Should be a string, a list/tuple of strings or a list/tuple of integers"

I'm training DialoGPT on my own dataset, following this tutorial.
When I follow the tutorial exactly with the provided dataset, I have no issues. The only change I made is swapping in my own dataset, which is 256,397 lines long compared to the tutorial's 1,906 lines.
I am not sure whether the error relates to the column labels in my dataset, to a text value in a particular row, or to the size of my data.
06/12/2020 09:23:08 - WARNING - __main__ - Process rank: -1, device: cuda, n_gpu: 1, distributed training: False, 16-bits training: False
06/12/2020 09:23:10 - INFO - transformers.configuration_utils - loading configuration file https://s3.amazonaws.com/models.huggingface.co/bert/microsoft/DialoGPT-small/config.json from cache at cached/c3a09526c725b854c685b72cf60c50f1fea9b0e4d6227fa41573425ef4bd4bc6.4c1d7fc2ac6ddabeaf0c8bec2ffc7dc112f668f5871a06efcff113d2797ec7d5
06/12/2020 09:23:10 - INFO - transformers.configuration_utils - Model config GPT2Config {
"activation_function": "gelu_new",
"architectures": [
"GPT2LMHeadModel"
],
"attn_pdrop": 0.1,
"bos_token_id": 50256,
"embd_pdrop": 0.1,
"eos_token_id": 50256,
"initializer_range": 0.02,
"layer_norm_epsilon": 1e-05,
"model_type": "gpt2",
"n_ctx": 1024,
"n_embd": 768,
"n_head": 12,
"n_layer": 12,
"n_positions": 1024,
"resid_pdrop": 0.1,
"summary_activation": null,
"summary_first_dropout": 0.1,
"summary_proj_to_labels": true,
"summary_type": "cls_index",
"summary_use_proj": true,
"vocab_size": 50257
}
06/12/2020 09:23:11 - INFO - transformers.configuration_utils - loading configuration file https://s3.amazonaws.com/models.huggingface.co/bert/microsoft/DialoGPT-small/config.json from cache at cached/c3a09526c725b854c685b72cf60c50f1fea9b0e4d6227fa41573425ef4bd4bc6.4c1d7fc2ac6ddabeaf0c8bec2ffc7dc112f668f5871a06efcff113d2797ec7d5
06/12/2020 09:23:11 - INFO - transformers.configuration_utils - Model config GPT2Config {
"activation_function": "gelu_new",
"architectures": [
"GPT2LMHeadModel"
],
"attn_pdrop": 0.1,
"bos_token_id": 50256,
"embd_pdrop": 0.1,
"eos_token_id": 50256,
"initializer_range": 0.02,
"layer_norm_epsilon": 1e-05,
"model_type": "gpt2",
"n_ctx": 1024,
"n_embd": 768,
"n_head": 12,
"n_layer": 12,
"n_positions": 1024,
"resid_pdrop": 0.1,
"summary_activation": null,
"summary_first_dropout": 0.1,
"summary_proj_to_labels": true,
"summary_type": "cls_index",
"summary_use_proj": true,
"vocab_size": 50257
}
06/12/2020 09:23:11 - INFO - transformers.tokenization_utils - Model name 'microsoft/DialoGPT-small' not found in model shortcut name list (gpt2, gpt2-medium, gpt2-large, gpt2-xl, distilgpt2). Assuming 'microsoft/DialoGPT-small' is a path, a model identifier, or url to a directory containing tokenizer files.
06/12/2020 09:23:15 - INFO - transformers.tokenization_utils - loading file https://s3.amazonaws.com/models.huggingface.co/bert/microsoft/DialoGPT-small/vocab.json from cache at cached/78725a31b87003f46d5bffc3157ebd6993290e4cfb7002b5f0e52bb0f0d9c2dd.1512018be4ba4e8726e41b9145129dc30651ea4fec86aa61f4b9f40bf94eac71
06/12/2020 09:23:15 - INFO - transformers.tokenization_utils - loading file https://s3.amazonaws.com/models.huggingface.co/bert/microsoft/DialoGPT-small/merges.txt from cache at cached/570e31eddfc57062e4d0c5b078d44f97c0e5ac48f83a2958142849b59df6bbe6.70bec105b4158ed9a1747fea67a43f5dee97855c64d62b6ec3742f4cfdb5feda
06/12/2020 09:23:15 - INFO - transformers.tokenization_utils - loading file https://s3.amazonaws.com/models.huggingface.co/bert/microsoft/DialoGPT-small/added_tokens.json from cache at None
06/12/2020 09:23:15 - INFO - transformers.tokenization_utils - loading file https://s3.amazonaws.com/models.huggingface.co/bert/microsoft/DialoGPT-small/special_tokens_map.json from cache at None
06/12/2020 09:23:15 - INFO - transformers.tokenization_utils - loading file https://s3.amazonaws.com/models.huggingface.co/bert/microsoft/DialoGPT-small/tokenizer_config.json from cache at None
06/12/2020 09:23:19 - INFO - filelock - Lock 140392381680496 acquired on cached/9eab12d0b721ee394e9fe577f35d9b8b22de89e1d4f6a89b8a76d6e1a82bceae.906a78bee3add2ff536ac7ef16753bb3afb3a1cf8c26470f335b7c0e46a21483.lock
06/12/2020 09:23:19 - INFO - transformers.file_utils - https://cdn.huggingface.co/microsoft/DialoGPT-small/pytorch_model.bin not found in cache or force_download set to True, downloading to /content/drive/My Drive/Colab Notebooks/cached/tmpj1dveq14
Downloading: 100%
351M/351M [00:34<00:00, 10.2MB/s]
06/12/2020 09:23:32 - INFO - transformers.file_utils - storing https://cdn.huggingface.co/microsoft/DialoGPT-small/pytorch_model.bin in cache at cached/9eab12d0b721ee394e9fe577f35d9b8b22de89e1d4f6a89b8a76d6e1a82bceae.906a78bee3add2ff536ac7ef16753bb3afb3a1cf8c26470f335b7c0e46a21483
06/12/2020 09:23:32 - INFO - transformers.file_utils - creating metadata file for cached/9eab12d0b721ee394e9fe577f35d9b8b22de89e1d4f6a89b8a76d6e1a82bceae.906a78bee3add2ff536ac7ef16753bb3afb3a1cf8c26470f335b7c0e46a21483
06/12/2020 09:23:33 - INFO - filelock - Lock 140392381680496 released on cached/9eab12d0b721ee394e9fe577f35d9b8b22de89e1d4f6a89b8a76d6e1a82bceae.906a78bee3add2ff536ac7ef16753bb3afb3a1cf8c26470f335b7c0e46a21483.lock
06/12/2020 09:23:33 - INFO - transformers.modeling_utils - loading weights file https://cdn.huggingface.co/microsoft/DialoGPT-small/pytorch_model.bin from cache at cached/9eab12d0b721ee394e9fe577f35d9b8b22de89e1d4f6a89b8a76d6e1a82bceae.906a78bee3add2ff536ac7ef16753bb3afb3a1cf8c26470f335b7c0e46a21483
06/12/2020 09:23:39 - INFO - transformers.modeling_utils - Weights of GPT2LMHeadModel not initialized from pretrained model: ['transformer.h.0.attn.masked_bias', 'transformer.h.1.attn.masked_bias', 'transformer.h.2.attn.masked_bias', 'transformer.h.3.attn.masked_bias', 'transformer.h.4.attn.masked_bias', 'transformer.h.5.attn.masked_bias', 'transformer.h.6.attn.masked_bias', 'transformer.h.7.attn.masked_bias', 'transformer.h.8.attn.masked_bias', 'transformer.h.9.attn.masked_bias', 'transformer.h.10.attn.masked_bias', 'transformer.h.11.attn.masked_bias']
06/12/2020 09:23:54 - INFO - __main__ - Training/evaluation parameters <__main__.Args object at 0x7fafa60a00f0>
06/12/2020 09:23:54 - INFO - __main__ - Creating features from dataset file at cached
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-12-523c0d2a27d3> in <module>()
----> 1 main(trn_df, val_df)
7 frames
<ipython-input-11-d6dfa312b1f5> in main(df_trn, df_val)
59 # Training
60 if args.do_train:
---> 61 train_dataset = load_and_cache_examples(args, tokenizer, df_trn, df_val, evaluate=False)
62
63 global_step, tr_loss = train(args, train_dataset, model, tokenizer)
<ipython-input-9-3c4f1599e14e> in load_and_cache_examples(args, tokenizer, df_trn, df_val, evaluate)
40
41 def load_and_cache_examples(args, tokenizer, df_trn, df_val, evaluate=False):
---> 42 return ConversationDataset(tokenizer, args, df_val if evaluate else df_trn)
43
44 def set_seed(args):
<ipython-input-9-3c4f1599e14e> in __init__(self, tokenizer, args, df, block_size)
24 self.examples = []
25 for _, row in df.iterrows():
---> 26 conv = construct_conv(row, tokenizer)
27 self.examples.append(conv)
28
<ipython-input-9-3c4f1599e14e> in construct_conv(row, tokenizer, eos)
1 def construct_conv(row, tokenizer, eos = True):
2 flatten = lambda l: [item for sublist in l for item in sublist]
----> 3 conv = list(reversed([tokenizer.encode(x) + [tokenizer.eos_token_id] for x in row]))
4 conv = flatten(conv)
5 return conv
<ipython-input-9-3c4f1599e14e> in <listcomp>(.0)
1 def construct_conv(row, tokenizer, eos = True):
2 flatten = lambda l: [item for sublist in l for item in sublist]
----> 3 conv = list(reversed([tokenizer.encode(x) + [tokenizer.eos_token_id] for x in row]))
4 conv = flatten(conv)
5 return conv
/usr/local/lib/python3.6/dist-packages/transformers/tokenization_utils.py in encode(self, text, text_pair, add_special_tokens, max_length, stride, truncation_strategy, pad_to_max_length, return_tensors, **kwargs)
1432 pad_to_max_length=pad_to_max_length,
1433 return_tensors=return_tensors,
-> 1434 **kwargs,
1435 )
1436
/usr/local/lib/python3.6/dist-packages/transformers/tokenization_utils.py in encode_plus(self, text, text_pair, add_special_tokens, max_length, stride, truncation_strategy, pad_to_max_length, is_pretokenized, return_tensors, return_token_type_ids, return_attention_mask, return_overflowing_tokens, return_special_tokens_mask, return_offsets_mapping, **kwargs)
1574 )
1575
-> 1576 first_ids = get_input_ids(text)
1577 second_ids = get_input_ids(text_pair) if text_pair is not None else None
1578
/usr/local/lib/python3.6/dist-packages/transformers/tokenization_utils.py in get_input_ids(text)
1554 else:
1555 raise ValueError(
-> 1556 "Input is not valid. Should be a string, a list/tuple of strings or a list/tuple of integers."
1557 )
1558
ValueError: Input is not valid. Should be a string, a list/tuple of strings or a list/tuple of integers.
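The traceback ends inside tokenizer.encode(x), which means some cell handed to the tokenizer is not a string; with a large CSV this is most often an empty cell that pandas reads in as a float NaN, which triggers exactly this ValueError. A minimal diagnostic sketch, assuming the training and validation data live in the pandas DataFrames trn_df and val_df from the tutorial:
# Find rows containing anything that is not a string (NaN cells show up as floats).
bad_rows = trn_df[~trn_df.applymap(lambda v: isinstance(v, str)).all(axis=1)]
print(len(bad_rows))
print(bad_rows.head())

# One common fix: drop incomplete rows and force every remaining cell to str.
trn_df = trn_df.dropna().astype(str)
val_df = val_df.dropna().astype(str)
If bad_rows comes back empty, the problem is more likely an unexpected extra column (every column of every row gets encoded in construct_conv), not the size of the dataset.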

Maximum value of a column in Apache Pig

I am trying to find the maximum value of the column ratingTime using Pig. I am running the script below:
ratings = LOAD '/user/maria_dev/ml-100k/u.data' AS (userid:int,movieID:int,rating:int, ratingTime:int);
maxrating = MAX(ratings.ratingTime);
DUMP maxrating
Sample input data:
196 242 3 881250949
186 302 3 891717742
22 377 1 878887116
244 51 2 880606923
I am getting the error below:
2018-08-05 07:02:05,247 [main] INFO org.apache.pig.backend.hadoop.PigATSClient - Created ATS Hook
2018-08-05 07:02:05,914 [main] ERROR org.apache.pig.PigServer - exception during parsing: Error during parsing. <file script.pi
You need a preceding GROUP ALL before applying MAX, since MAX is an aggregate that operates on a bag:
ratings = LOAD '/user/maria_dev/ml-100k/u.data' USING PigStorage('\t') AS (userid:int, movieID:int, rating:int, ratingTime:int);
ratings_group = GROUP ratings ALL;
maxrating = FOREACH ratings_group GENERATE MAX(ratings.ratingTime);
DUMP maxrating;

Why is RLIMIT_NOFILE rlim_max of -1 on BSD?

In the following code:
struct rlimit limit;

method = "rlimit";
if (getrlimit(RLIMIT_NOFILE, &limit) < 0) {
    perror("calling getrlimit");
    exit(1);
}

/* set the current to the maximum or specified value */
if (max_desired_fds)
    limit.rlim_cur = max_desired_fds;
else {
    limit.rlim_cur = limit.rlim_max;
}

if (setrlimit(RLIMIT_NOFILE, &limit) < 0) {
    perror("calling setrlimit");
    exit(1);
}
the setrlimit call fails (I get the error "calling setrlimit"). Further investigation shows that limit.rlim_max is -1, which is not a valid value. Any ideas why this would be? This is on Mac OS X.
If setrlimit fails, try again with rlim_cur set to OPEN_MAX. For example, see http://source.winehq.org/source/libs/wine/loader.c#L653. (The comment mentioning Leopard means that Leopard first introduced that behavior. Read it as Leopard-and-later.)
ETA: See the note in COMPATIBILITY in the setrlimit(2) man page.
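The same retry pattern is easy to demonstrate from Python's resource module (shown purely as an illustration of the logic, not as the fix for the C snippet above; the OPEN_MAX value is an assumption taken from macOS's <sys/syslimits.h>). The -1 the question reports is very likely RLIM_INFINITY printed through a signed integer, and macOS refuses to set rlim_cur that high for RLIMIT_NOFILE:
import resource

soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
print(soft, hard)  # on macOS the hard limit is typically RLIM_INFINITY

OPEN_MAX = 10240  # assumed macOS value of OPEN_MAX
target = hard if hard != resource.RLIM_INFINITY else OPEN_MAX
try:
    resource.setrlimit(resource.RLIMIT_NOFILE, (target, hard))
except ValueError:
    # The kernel rejected the request; retry capped at OPEN_MAX, as the Wine loader does.
    resource.setrlimit(resource.RLIMIT_NOFILE, (min(target, OPEN_MAX), hard))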

Java: WebSocket server won't send data to client properly

Handshakes are done correctly and the server can decode the data coming from the client, but the client closes the connection when I try to send data to it.
I've been using http://websocket.org/echo.html as the client, with the latest versions of Firefox and Chrome.
Here's the data frame I'm trying to send (each line shows a byte in decimal and binary):
129 10000001
4 100
116 1110100
101 1100101
115 1110011
116 1110100
-------
fin:true
opcode:1
len:4
masked:false
masks:[0, 0, 0, 0]
payload:test
?♦test
http://tools.ietf.org/html/rfc6455#section-5
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-------+-+-------------+-------------------------------+
|F|R|R|R| opcode|M| Payload len | Extended payload length |
|I|S|S|S| (4) |A| (7) | (16/64) |
|N|V|V|V| |S| | (if payload len==126/127) |
| |1|2|3| |K| | |
+-+-+-+-+-------+-+-------------+ - - - - - - - - - - - - - - - +
| Extended payload length continued, if payload len == 127 |
+ - - - - - - - - - - - - - - - +-------------------------------+
| |Masking-key, if MASK set to 1 |
+-------------------------------+-------------------------------+
| Masking-key (continued) | Payload Data |
+-------------------------------- - - - - - - - - - - - - - - - +
: Payload Data continued ... :
+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +
| Payload Data continued ... |
+---------------------------------------------------------------+
And the server-side method responsible for sending data to the client:
public void sendData(Socket socket, byte[] dataBytes) {
    System.out.println(java.util.Arrays.toString(dataBytes));
    // [-127, 4, 116, 101, 115, 116]
    for (byte b : dataBytes) System.out.println(Integer.toString((int) 0xff & b, 2));
    /*
    10000001
    100
    1110100
    1100101
    1110011
    1110100
    */
    try {
        InputStream data = new ByteArrayInputStream(dataBytes);
        OutputStream out = socket.getOutputStream();
        // tested with ByteArrayOutputStream and written data == dataBytes
        // out.write((byte) 0x00); // tried with and without this
        if (data != null) {
            // tried also out.write(dataBytes) instead of this
            byte[] buff = new byte[2048];
            while (true) {
                int read = data.read(buff, 0, 2048);
                if (read <= 0)
                    break;
                out.write(buff, 0, read);
            }
        }
        // out.write(-1);
        // out.write((byte) 0xFF);
        out.flush();
        // out.close();
        if (data != null)
            data.close();
    } catch (Exception e) {
        e.printStackTrace();
        sockets.remove(socket);
    }
}
Some questions:
Do you wait for the connection to open fully before sending from the server?
Can you capture the stream using wireshark and see what's actually on the wire?
In Chrome's Javascript console do you see any WebSocket related errors?
In your onclose handler for the Javascript websocket object, can you console.log the values of code and reason from the event?
Like this:
ws.onclose = function (e) {
    console.log("closed - code " + e.code + ", reason " + e.reason);
};
Your issue was (probably) using the old protocol. Use the newer one.
See http://web-sockets.org, which has working source for a client and a server (in Java).
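For reference, here is a minimal sketch of the RFC 6455 framing (written in Python purely for illustration, not as a drop-in for the Java server): a server-to-client text frame is one 0x81 byte (FIN set, opcode 1), an unmasked length, then the UTF-8 payload. The [-127, 4, 116, 101, 115, 116] array printed above is exactly this layout for the 4-byte payload "test" (-127 is 0x81 read as a signed Java byte):
def make_text_frame(message):
    payload = message.encode("utf-8")
    if len(payload) < 126:
        header = bytes([0x81, len(payload)])  # FIN=1, opcode=1 (text), MASK=0
    elif len(payload) < 65536:
        header = bytes([0x81, 126]) + len(payload).to_bytes(2, "big")
    else:
        header = bytes([0x81, 127]) + len(payload).to_bytes(8, "big")
    return header + payload

print(list(make_text_frame("test")))  # [129, 4, 116, 101, 115, 116]
So the frame bytes themselves look valid RFC 6455; if the client still drops the connection, look at the handshake or at stray extra bytes on the wire (the commented-out 0x00/0xFF writes are the old Hixie-style framing).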
