I have this simple code using Lighthouse. When I run it with the desktop configuration, it somehow redirects to the mobile page and produces the mobile analytics.
const fs = require("fs");
const lighthouse = require("lighthouse");
const chromeLauncher = require("chrome-launcher");
(async () => {
  const URL = "https://www.youtube.com/";
  const chrome = await chromeLauncher.launch({
    chromeFlags: ["--ignore-certificate-errors"],
    port: 61736,
  });
  const options = {
    extends: "lighthouse:default",
    logLevel: "info",
    output: "html",
    onlyCategories: ["performance"],
    port: chrome.port,
    preset: "desktop",
  };
  const runnerResult = await lighthouse(URL, options);

  // `.report` is the HTML report as a string
  const reportHtml = runnerResult.report;
  fs.writeFileSync("lhreport.html", reportHtml);

  // `.lhr` is the Lighthouse Result as a JS object
  console.log("Report is done for", runnerResult.lhr.finalUrl);
  console.log(
    "Performance score was",
    runnerResult.lhr.categories.performance.score * 100
    // runnerResult.lhr.categories.performance
  );

  await chrome.kill();
})();
However, if I run the command lighthouse https://www.youtube.com/ --preset=desktop --chrome-flags=ignore-certificate-errors --port=61736, it produces the desktop analytics only. I'm not sure why.
How can I get the desktop analytics through code?
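A minimal sketch of what the programmatic equivalent might look like, assuming the desktop config that ships with Lighthouse (the require path below is for Lighthouse 9.x; newer releases move it to lighthouse/core/config/desktop-config.js). The Node API takes a separate config object as its third argument, which is where the desktop preset belongs rather than in the flags:

const fs = require("fs");
const lighthouse = require("lighthouse");
const chromeLauncher = require("chrome-launcher");
// Assumed path for Lighthouse 9.x; newer releases ship it as lighthouse/core/config/desktop-config.js
const desktopConfig = require("lighthouse/lighthouse-core/config/desktop-config.js");

(async () => {
  const chrome = await chromeLauncher.launch({
    chromeFlags: ["--ignore-certificate-errors"],
  });
  const flags = {
    logLevel: "info",
    output: "html",
    onlyCategories: ["performance"],
    port: chrome.port,
  };
  // The third argument is a full Lighthouse config; the desktop one sets
  // formFactor and screenEmulation so the run is not emulated as mobile.
  const runnerResult = await lighthouse("https://www.youtube.com/", flags, desktopConfig);
  fs.writeFileSync("lhreport.html", runnerResult.report);
  await chrome.kill();
})();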
I am getting the attached CORS error when I try to upload a file from Angular to Azure Blob Storage. The same Azure storage account configuration works from ASP.NET, but not from Angular. Can you please help me with this?
I am trying to upload a file to Azure Blob Storage from Angular; the code I implemented is below. To check whether the file upload works at all, I have hard-coded the SAS token instead of generating it.
Below is the code:
// environment file
export const environment = {
  production: false,
  accountName: "<accountname>",
  containerName: "<containername>",
  key: "<key>"
};

// added in polyfills.ts
(window as any).global = window;
(window as any).process = require('process');
(window as any).Buffer = require('buffer').Buffer;
// file upload method (assumed imports in the component:
// import { BlobServiceClient, AnonymousCredential, newPipeline } from '@azure/storage-blob';
// import * as CryptoJS from 'crypto-js';)
async uploadFileToBlob() {
  // generate account sas token (currently bypassed by the hard-coded token below;
  // note that StringToSign is not defined anywhere in this snippet)
  const accountName = environment.accountName;
  const key = environment.key;
  // var str = CryptoJS.HmacSHA256(StringToSign, CryptoJS.enc.Base64.parse(key));
  // var sig = CryptoJS.enc.Base64.stringify(str);
  const sasToken = `sp=rac&st=2022-11-21T15:37:13Z&se=2023-12-31T23:37:13Z&spr=https&sv=2021-06-08&sr=c&sig=rYx4JWTcGPVSceUkuxJDSXN8u1%2BbNSFh3A7dYPqw3EA%3D`;
  const containerName = environment.containerName;

  const pipeline = newPipeline(new AnonymousCredential(), {
    retryOptions: { maxTries: 4 }, // Retry options
    userAgentOptions: { userAgentPrefix: "AdvancedSample V1.0.0" }, // Customized telemetry string
    keepAliveOptions: {
      // Keep alive is enabled by default; disable it by setting enable to false
      enable: false
    }
  });

  const blobServiceClient = new BlobServiceClient(
    `https://${accountName}.blob.core.windows.net?${sasToken}`,
    pipeline
  );
  const containerClient = blobServiceClient.getContainerClient(containerName);

  // exists() returns a Promise, so it has to be awaited
  if (!(await containerClient.exists())) {
    console.log("the container does not exist");
    await containerClient.create();
  }

  const client = containerClient.getBlockBlobClient(this.fileName.name);
  const response = await client.uploadData(this.fileName, {
    blockSize: 4 * 1024 * 1024, // 4MB block size
    concurrency: 20, // 20 concurrency
    onProgress: (ev) => console.log(ev),
    blobHTTPHeaders: { blobContentType: this.fileName.type }
  });
  console.log(response._response.status);
}
Can you help me resolve this issue?
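Since the same storage account works from ASP.NET but fails from the browser, the usual difference is that browser requests need CORS rules on the storage account. A hedged sketch of how those rules could be set from a small Node script with @azure/storage-blob, assuming you have the account key (the same rules can also be configured in the Azure portal under Resource sharing (CORS)):

// Node-only sketch: StorageSharedKeyCredential is not available in the browser.
const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");

async function enableCors() {
  const accountName = "<accountname>"; // assumed: same values as in the environment file
  const accountKey = "<key>";
  const credential = new StorageSharedKeyCredential(accountName, accountKey);
  const serviceClient = new BlobServiceClient(
    `https://${accountName}.blob.core.windows.net`,
    credential
  );

  // Read the current service properties and add a CORS rule for the Angular dev origin.
  const props = await serviceClient.getProperties();
  props.cors = [
    {
      allowedOrigins: "http://localhost:4200", // assumed Angular dev-server origin
      allowedMethods: "GET,HEAD,PUT,POST,OPTIONS",
      allowedHeaders: "*",
      exposedHeaders: "*",
      maxAgeInSeconds: 3600
    }
  ];
  await serviceClient.setProperties(props);
  console.log("CORS rules updated");
}

enableCors().catch(console.error);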
I'm using Mocha and Puppeteer and running the test cases in AWS Lambda. I'm opening multiple tabs in the browser using the code below.
// chromium comes from chrome-aws-lambda (assumed: const chromium = require('chrome-aws-lambda');)
browser = await chromium.puppeteer.launch({
  args: chromium.args,
  defaultViewport: chromium.defaultViewport,
  executablePath: await chromium.executablePath,
  headless: chromium.headless,
  ignoreHTTPSErrors: true,
});

const openTab = async (browser) => {
  let content;
  try {
    const url1 = process.env.URL || 'https://www.google.com/';
    const page = await browser.newPage();
    await page.goto(url1, { waitUntil: ["load", "networkidle2"] });
    content = await page.evaluate(() => document.body.innerHTML);
  } catch (err) {
    console.log("browser tab open error ==> ", err);
  }
  return content;
};

// openTab has to be defined before it is used here
const browserTabs = Array.from({ length: 50 }).map(() => openTab(browser));
const results = await Promise.all(browserTabs); // wait for all tabs
If I open 40 tabs, it works fine. But if I try to open 50 tabs, I get the error below, even though RAM usage is not even at 30%. What could be the reason for this error?
Everything works fine locally.
at /var/task/node_modules/puppeteer-core/lib/cjs/puppeteer/common/LifecycleWatcher.js:51:147
at /var/task/node_modules/puppeteer-core/lib/cjs/vendor/mitt/src/index.js:47:62
at Array.map (<anonymous>)
at Object.emit (/var/task/node_modules/puppeteer-core/lib/cjs/vendor/mitt/src/index.js:47:43)
at CDPSession.emit (/var/task/node_modules/puppeteer-core/lib/cjs/puppeteer/common/EventEmitter.js:72:22)
at CDPSession._onClosed (/var/task/node_modules/puppeteer-core/lib/cjs/puppeteer/common/Connection.js:247:14)
at Connection._onClose (/var/task/node_modules/puppeteer-core/lib/cjs/puppeteer/common/Connection.js:128:21)
at WebSocket.<anonymous> (/var/task/node_modules/puppeteer-core/lib/cjs/puppeteer/common/WebSocketTransport.js:17:30)
at WebSocket.onClose (/var/task/node_modules/ws/lib/event-target.js:129:16)
at WebSocket.emit (events.js:315:20)
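One thing that might be worth trying (a hedged sketch, not from the snippet above): open the pages in smaller batches instead of firing all 50 at once, since Lambda constrains CPU and the number of concurrent Chrome targets even when memory looks fine. The batch size of 10 is only an assumption.

// Opens `total` tabs in batches of `batchSize`, reusing the openTab helper above.
const openTabsInBatches = async (browser, total = 50, batchSize = 10) => {
  const results = [];
  for (let i = 0; i < total; i += batchSize) {
    const batch = Array.from({ length: Math.min(batchSize, total - i) })
      .map(() => openTab(browser));
    results.push(...(await Promise.all(batch)));
  }
  return results;
};

// usage: const contents = await openTabsInBatches(browser);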
Has anyone here implemented Dialogflow fulfillment on a GraphQL server? How do you handle it? Do you handle fulfillment as a mutation, or do you implement it as a separate REST endpoint?
I am able to expose my local server using ngrok, but I am not sure how to go about setting up the fulfillment. I have separated my Dialogflow (DF) code from the GraphQL code so that the DF module only exposes the methods that handle event and text queries to Dialogflow:
// df/index.js
module.exports = {
  text: () => {
    const self = module.exports;
    // ...
    return self.getResult();
  },
  event: () => {
    const self = module.exports;
    // ...
    return self.getResult();
  },
  getResult: () => {
    // ...
    return {
      query,
      response,
      cards,
      quickReply
    };
  }
};
This module is then passed through the GraphQL context and exposed to the bot.resolver.js module, where the respective mutations for handling text and events are defined as shown (a sketch of the context wiring follows the resolver):
// schema/resolvers/bot.resolver.js
module.exports = {
  // Mutation
  Mutation: {
    sendText: (parent, args, context) => {
      const { df } = context;
      const response = df.text(args);
      return response;
    },
    sendEvent: (parent, args, context) => {
      const { df } = context;
      const response = df.event(args);
      return response;
    },
  },
};
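For completeness, a minimal sketch of how the df module might be wired into the Apollo context so the resolvers above receive it. The schema/index.js file name and layout are assumptions, chosen to match the const schema = require('./schema') used further down:

// schema/index.js (assumed file name and layout)
const typeDefs = require('./types/bot.type');
const resolvers = require('./resolvers/bot.resolver');
const df = require('../df'); // the Dialogflow wrapper shown above

module.exports = {
  typeDefs,
  resolvers,
  // every resolver receives { df } as its third (context) argument
  context: () => ({ df }),
};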
The corresponding GraphQL types are defined in bot.type.js as shown:
const { gql } = require('apollo-server-express');
module.exports = gql`
  type Course {
    id: ID
    header: String!
    image: String!
    description: String
    price: String!
  }

  type Option {
    value: String!
    payload: String
    link: String
  }

  type QuickReply {
    text: String!
    options: [Option!]!
  }

  type Bot {
    query: String!
    response: String!
    cards: [Course!]
    quickReply: QuickReply
  }

  type Mutation {
    sendText(text: String!, userId: String!, parameters: String): Bot!
    sendEvent(name: String!, userId: String!, parameters: String): Bot!
  }
`;
Please advise where I can write the code that sets up the Dialogflow fulfillment (the dialogflow-fulfillment setup code).
😊 Surprisingly, it was as simple as writing it as a middleware on my GraphQL API.
// import the required dependencies
const express = require('express');
const bodyParser = require('body-parser')
const cors = require('cors');
const { ApolloServer, } = require('apollo-server-express');
// do not forget your graphQl schema definition
const schema = require('./schema');
// we shall also need to import the data source.
// I will assume an array for our data source defined as below
const models = {
  Course: [
    { id: 1, name: 'Chatbots' },
    { id: 2, name: 'React' },
    { id: 3, name: 'Express' },
    { id: 4, name: 'GraphQl' },
  ],
  Book: [
    { id: 1, title: 'Fundamentals in Chatbots', courseId: 1 },
    { id: 2, title: 'Express for Newbies', courseId: 3 },
    { id: 3, title: 'Advanced AI on Bots', courseId: 1 },
    { id: 4, title: 'GraphQl Simplified', courseId: 4 },
  ]
};
// Import the WebhookClient class
const { WebhookClient } = require('dialogflow-fulfillment');
// Do the graphQl gymnastics ... I find Apollo server 2 just on point.
const server = new ApolloServer(schema);
const path='/'
const port = process.env.PORT || 4000
const app = express(); // we will merge express with the Apollo server 2
// do the express gymnastics ...
app.use(path,cors(),bodyParser.json(),)
// **IT'S HERE THAT WE DEFINE DIALOG FLOW'S WEB-HOOK AS A MIDDLEWARE**
app.use('/webhook', async (request, response) => {
  const agent = new WebhookClient({ request, response });
  const { parameters } = request.body.queryResult;
  const course = parameters['course'];

  // ... do the database logic here ...
  // e.g. get the titles of the available text books for the particular course
  // I will assume
  const { id } = models.Course.find(({ name }) => name === course);
  const books = models.Book.filter(({ courseId }) => courseId === id);
  const booksTitleArray = books.map(({ title }) => title);
  let titleList = booksTitleArray.toString();
  titleList = titleList.replace(',', ' , '); // put a space between commas
  titleList = titleList.replace(/\,(?=[^,]*$)/, " and "); // replace the last comma with "and"

  const recommendBooks = agent => {
    agent.add(`For ${course}, we use the following books: ${titleList}`);
  };

  const intentMap = new Map();
  intentMap.set('course.recommended.books', recommendBooks);
  agent.handleRequest(intentMap); // handleRequest sends the response, so there is no next() here
});
server.applyMiddleware({ app, path });
app.listen(port,()=>{
console.log( `Apollo Server Running on http://localhost:${port}`)
})
I feel like writing an article on this because I tried looking for help almost everywhere in vain. In case I get the time to do so, I will provide it in the comments. 😏😉🤔🤭
Guys, we should not forget the ngrok magic if we are testing from localhost 😁
I am getting this error in my Heroku logs.
I found the same question asked before, but none of the solutions provided there addressed the issue.
I tried different variations of the get method:
app.use(express.static('build'));
app.get('*', function (req, res) {
  res.sendFile('index.html');
});
What else could I try, or what am I missing here?
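One more variation that might be worth trying (a hedged sketch, not among the attempts above): res.sendFile needs an absolute path or a root option, so resolving index.html against the build directory is a common fix.

const path = require('path');

app.use(express.static(path.join(__dirname, 'build')));
app.get('*', (req, res) => {
  // sendFile requires an absolute path (or a `root` option)
  res.sendFile(path.join(__dirname, 'build', 'index.html'));
});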
App.js
const configuration = require('@feathersjs/configuration');
const feathers = require('@feathersjs/feathers');
const express = require('@feathersjs/express');
const socketio = require('@feathersjs/socketio');
const moment = require('moment');
class IdeaService {
  constructor() {
    this.ideas = [];
  }

  async find() {
    return this.ideas;
  }

  async create(data) {
    const idea = {
      id: this.ideas.length,
      text: data.text,
      tech: data.tech,
      viewer: data.viewer
    };
    idea.time = moment().format('h:mm:ss a');
    this.ideas.push(idea);
    return idea;
  }
}
const app = express(feathers());
app.feathers().configure(configuration());
app.use(express.static('build'));
app.get('*', function (req, res) {
  res.sendFile('index.html');
});
// Parse JSON
app.use(express.json());
// Configure SocketIO realtime API
app.configure(socketio());
// Enable REST services
app.configure(express.rest());
// Register services
app.use('/ideas', new IdeaService());
// Connect new streams
app.on('connection', conn => app.channel('stream').join(conn));
// Publish events to stream
app.publish(data => app.channel('stream'));
const PORT = process.env.PORT || 3030;
app.listen(PORT).on('listening', () => console.log(`Server running on port ${PORT}`));
app.service('ideas').create({
  text: 'Build a cool app',
  tech: 'Node.js',
  viewer: 'John Doe'
});
module.exports = IdeaService; // the file uses CommonJS requires, so an ESM `export default` would throw
package.json
Trying to follow the samples from https://github.com/Azure/ms-rest-nodeauth.
When I pass the authResponse to a client constructor so I can ping resources, I end up getting:
Error: credentials argument needs to implement signRequest method
I am trying to read through the documents to see if I need to sign the tokens I am getting back from the SDK/Azure AD, but the documentation for the new SDK doesn't show anything.
Figured it out: you have to call .credentials on the authResponse.
Adding the code, using @azure/arm-billing, in case the full code file is helpful.
// auth.json
// Create the auth file with the Azure CLI: az ad sp create-for-rbac --sdk-auth > auth.json
{
  "clientId": "",
  "clientSecret": "",
  "subscriptionId": "",
  "tenantId": "",
  "activeDirectoryEndpointUrl": "https://login.microsoftonline.com",
  "resourceManagerEndpointUrl": "https://management.azure.com/",
  "activeDirectoryGraphResourceId": "https://graph.windows.net/",
  "sqlManagementEndpointUrl": "https://management.core.windows.net:8443/",
  "galleryEndpointUrl": "https://gallery.azure.com/",
  "managementEndpointUrl": "https://management.core.windows.net/"
}
// index.js
const msRest = require("@azure/ms-rest-js");
const msRestAzure = require("@azure/ms-rest-azure-js");
const msRestNodeAuth = require("@azure/ms-rest-nodeauth");
const armBilling = require("@azure/arm-billing");
const path = require('path');
// list billing information
const lists = async (client) => {
  try {
    let lists = [];
    const enrollmentAccounts = await client.enrollmentAccounts.list();
    lists.push(enrollmentAccounts);
    const billingPeriods = await client.billingPeriods.list();
    lists.push(billingPeriods);
    const invoices = await client.invoices.list();
    lists.push(invoices);
    return lists;
  } catch (err) {
    console.log(err);
    throw (err);
  }
};

// sample auth file created from Azure CLI - removed PII
const authenticationFile = path.join(__dirname, "./auth.json");
const options = {
  filePath: authenticationFile
};

// get subscriptionId from auth file
const subscriptionIdFromAuthFile = require('./auth.json').subscriptionId;

// authenticate and get billing information
msRestNodeAuth.loginWithAuthFileWithAuthResponse(options).then(async (response) => {
  console.log("authenticated");
  // --- CHANGE the response parameter to -> response.credentials
  const client = new armBilling.BillingManagementClient(response.credentials, subscriptionIdFromAuthFile);
  console.log("client created");
  const results = await lists(client);
  console.log(`The result is: ${JSON.stringify(results)}`);
}).catch((err) => {
  console.error(err);
});